more alpha

This commit is contained in:
Steven Ettinger 2022-01-28 12:26:52 -03:00
parent ac81bb510a
commit 5ad9729697
9 changed files with 86 additions and 46 deletions

View File

@ -8,4 +8,4 @@ RUN npm install
COPY . .
CMD ["node", "index.js"]
CMD ["node", "docker-start.js"]

View File

@ -2,6 +2,7 @@
* Updated consensus mechanism for scaling
* Updated restarts for scaling mechanism
* New witness promotion routine
* Dockerized with own IPFS dependencies.
* Fast Replay
* Autonomous Multi-sig Account Management.
* Accurate Tracking of collateralized safety margins

View File

@ -25,8 +25,9 @@ const engineCrank = ENV.startingHash || '' //but this state will be inserted bef
const rta = ENV.rta || '' //rtrades account : IPFS pinning interface
const rtp = ENV.rtp || '' //rtrades password : IPFS pinning interface
var ipfshost = ENV.ipfshost || 'ipfs.infura.io' //IPFS upload/download provider provider
const ipfshost = ENV.ipfshost || 'ipfs.infura.io' //IPFS upload/download provider provider
const ipfsport = ENV.ipfsport || '5001' //IPFS upload/download provider
const ipfsprotocol = ENV.ipfsprotocol || 'https' //IPFS upload/download protocol
//node market config > 2500 is 25% inflation to node operators, this is currently not used
const bidRate = ENV.BIDRATE || 2500 //
@ -110,6 +111,8 @@ let config = {
rtp,
override,
ipfshost,
ipfsprotocol,
ipfsport,
starting_block,
prefix,
leader,

View File

@ -1,7 +1,6 @@
version: '3'
services:
node-1:
node-1:
ipfs:
# docker run -d --name ipfs_host -v $ipfs_staging:/export -v $ipfs_data:/data/ipfs -p 4001:4001 -p 127.0.0.1:8080:8080 -p 127.0.0.1:5001:5001 ipfs/go-ipfs:latest
image: ipfs/go-ipfs:latest
ports:
@ -17,34 +16,47 @@ services:
# target: /data/ipfs
- ./staging_dir:/export
# - ./data_dir:/data/ipfs
- node-1:/data/ipfs
node-2:
image: ipfs/go-ipfs:latest
build:
context: private-network
environment:
LIBP2P_FORCE_PNET: '1'
# SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
volumes:
- node-2:/data/ipfs
- ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
- ./private-network/init.sh:/usr/local/bin/start_ipfs
- ipfs:/data/ipfs
honeycomb:
# depends_on:
# - ipfs
build: .
restart: unless-stopped
ports:
- 8080
node-3:
build:
context: private-network
- "3001:80"
environment:
LIBP2P_FORCE_PNET: '1'
# SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
SWARM_PEER: /ip4/172.18.0.2/tcp/4001/ipfs/QmUrp1E4ArW1ZDdFHnQrKfdH8tgGxM38hmPyjXEEoTmfto
volumes:
- node-3:/data/ipfs
- ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
- ./private-network/init.sh:/usr/local/bin/start_ipfs
ports:
- 8080
- ipfshost=ipfs
- ipfsprotocol=http
- ipfsport=5001
stdin_open: true
tty: true
# node-2:
# image: ipfs/go-ipfs:latest
# build:
# context: private-network
# environment:
# LIBP2P_FORCE_PNET: '1'
# # SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
# volumes:
# - node-2:/data/ipfs
# - ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
# - ./private-network/init.sh:/usr/local/bin/start_ipfs
# ports:
# - 8080
# node-3:
# build:
# context: private-network
# environment:
# LIBP2P_FORCE_PNET: '1'
# # SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
# SWARM_PEER: /ip4/172.18.0.2/tcp/4001/ipfs/QmUrp1E4ArW1ZDdFHnQrKfdH8tgGxM38hmPyjXEEoTmfto
# volumes:
# - node-3:/data/ipfs
# - ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
# - ./private-network/init.sh:/usr/local/bin/start_ipfs
# ports:
# - 8080
volumes:
node-1:
node-2:
node-3:
ipfs:
# node-2:
# node-3:

9
docker-start.js Normal file
View File

@ -0,0 +1,9 @@
const fetch = require('node-fetch');
const spawn = require('child_process').spawn
ping()
function ping () {
fetch(`http://${process.env.ipfshost}:${process.env.ipfsport}/ping`)
.then(res => res.text())
.then(text => {console.log('Deploying:');spawn('node', ['index.js'], {stdio: 'inherit'})})
.catch(err => {console.log('Waiting for IPFS...');setTimeout(ping, 2000)});
}

0
docs/SETUP.md Normal file
View File

View File

@ -17,11 +17,13 @@ const args = require('minimist')(process.argv.slice(2));
const express = require('express');
const stringify = require('json-stable-stringify');
const IPFS = require('ipfs-api'); //ipfs-http-client doesn't work
const ipfs = new IPFS({
const fetch = require('node-fetch');
var ipfs = new IPFS({
host: config.ipfshost,
port: 5001,
protocol: 'https'
});
port: config.ipfsport,
protocol: config.ipfsprotocol
})
console.log(`IPFS: ${config.ipfshost == 'ipfs' ? 'DockerIPFS' : config.ipfshost}:${config.ipfsport}`)
exports.ipfs = ipfs;
const rtrades = require('./rtrades');
var Pathwise = require('./pathwise');
@ -110,7 +112,8 @@ const { dao, Liquidity } = require("./dao");
const { recast } = require('./lil_ops')
const hiveState = require('./processor');
const { getPathObj, getPathNum, getPathSome } = require('./getPathObj');
const { consolidate, sign, createAccount, updateAccount } = require('./msa')
const { consolidate, sign, createAccount, updateAccount } = require('./msa');
const { resolve } = require('path');
const api = express()
var http = require('http').Server(api);
var escrow = false;
@ -128,10 +131,11 @@ var live_dex = {}, //for feedback, unused currently
var recents = []
//HIVE API CODE
//Start Program Options
//Start Program Options
startWith('QmfYdmSKpy1SBR9w6qUpUGNUpfvo2Gezg86bXnsrPznpDg', true) //for testing and replaying 58859101
//dynStart(config.follow)
// API defs
api.use(API.https_redirect);
api.use(cors())
@ -208,6 +212,16 @@ if (config.rta && config.rtp) {
//starts block processor after memory has been loaded
function startApp() {
const ipfsp = config.ipfshost == 'ipfs' ? new IPFS({
host: config.ipfshost,
port: config.ipfsport,
protocol: config.ipfsprotocol
}) : ipfs
if(config.ipfshost == 'ipfs')ipfsp.id(function (err, res) {
if(err){}
if(res)plasma.id = res.id
console.log(res)
})
processor = hiveState(client, hive, startingBlock, 10, config.prefix, streamMode, cycleAPI);
processor.on('send', HR.send);
processor.on('claim', HR.claim);
@ -475,7 +489,7 @@ function startApp() {
block.ops = []
store.get([], function(err, obj) {
const blockState = Buffer.from(stringify([num, obj]))
ipfsSaveState(num, blockState)
ipfsSaveState(num, blockState, ipfsp)
.then(pla => {
block.root = pla.hashLastIBlock
plasma.hashSecIBlock = plasma.hashLastIBlock
@ -488,7 +502,7 @@ function startApp() {
} else if (num % 100 === 1) {
const blockState = Buffer.from(stringify([num, block]))
block.ops = []
ipfsSaveState(num, blockState)
ipfsSaveState(num, blockState, ipfsp)
.then(pla => {
block.chain.push({hash: pla.hashLastIBlock, hive_block: num})
plasma.hashSecIBlock = plasma.hashLastIBlock
@ -793,7 +807,8 @@ function startWith(hash, second) {
});
})
.catch(e=>{
console.log('error in ipfs', e)
process.exit()
})
} else {
startingBlock = config.starting_block
@ -855,7 +870,6 @@ function ipfspromise(hash){
return new Promise((resolve, reject) => {
ipfs.cat(hash, function(err, data) {
if (err) {
console.log(err)
reject(null)
} else {
resolve(data)

View File

@ -1,6 +1,6 @@
const { ipfs } = require("./index");
//const { ipfs } = require("./index");
exports.ipfsSaveState = (blocknum, buffer) => {
exports.ipfsSaveState = (blocknum, buffer, ipfs) => {
return new Promise((resolve, reject) => {
ipfs.add(buffer, (err, ipfs_return) => {
if (!err) {

View File

@ -8,7 +8,8 @@ function report(plas, con) {
let report = {
hash: plas.hashLastIBlock,
block: plas.hashBlock,
stash: plas.privHash
stash: plas.privHash,
ipfs_id: plas.id,
}
try {if(r.block > report.block){
report.sig = r.sig,