This commit is contained in:
Steven Ettinger 2022-01-27 14:25:21 -03:00
parent 4e09730511
commit 4845546a73
18 changed files with 585 additions and 292 deletions

View File

@ -1,4 +1,20 @@
## 1.2
* Updated consensus mechanism for scaling. X
* Updated restarts for scaling mechanism. X
* New witness promotion routine. X
* Fast Replay. O
* Autonomous Multi-sig Account Management. 1/2 --needs dao account update
* Accurate Tracking of collateralized safety margins X
* Enforcement of safety margins. O
* Added a claim routine to ease tax reporting. X
* Half of claims locked in gov or power. X
* Opened NFT minting to all accounts. X
* Fixed DEX history. X
* Fixed DEX sell loop for expired trades X
* Implemented multiple hive-js node selection X
### 1.1.3
* r6
* Remove stop callback for API failure. Rely on API Cycle only
* remove ecency from API List
* Remove ecency from API List
* Add release notes

View File

@ -31,8 +31,8 @@ var ipfshost = ENV.ipfshost || 'ipfs.infura.io' //IPFS upload/download provider
const bidRate = ENV.BIDRATE || 2500 //
//HIVE CONFIGS
var startURL = ENV.STARTURL || "https://api.deathwing.me/"
var clientURL = ENV.APIURL || "https://api.deathwing.me/"
var startURL = ENV.STARTURL || "https://rpc.ecency.com/"
var clientURL = ENV.APIURL || "https://rpc.ecency.com/"
const clients = ENV.clients || [
"https://api.deathwing.me/",
//"https://api.c0ff33a.uk/",
@ -56,11 +56,22 @@ const delegation = 'dlux-io' //account people can delegate to for rewards
const delegationWeight = 1000 //when to trigger community rewards with bens
const msaccount = 'dlux-cc' //account controlled by community leaders
const mainAPI = 'token.dlux.io' //leaders API probably
const mainRender = 'dluxdata.herokuapp.com' //data and render server
const mainFE = 'dlux.io' //frontend for content
const mainIPFS = 'a.ipfs.dlux.io' //IPFS service
const mainICO = 'robotolux' //Account collecting ICO HIVE
const footer = `\n[Find us on Discord](https://discord.gg/Beeb38j)`
const hive_service_fee = 100 //HIVE service fee for transactions in Hive/HBD in centipercents (1% = 100)
const features = {
pob: true, //proof of brain
delegate: true, //delegation
liquidity: true, //liquidity
ico: true, //ico
dex: true, //dex
nft: true, //nfts
state: true, //api dumps
claimdrop: false //claim drops
}
//Additionally on your branch, look closely at dao, this is where tokenomics happen and custom status posts are made
@ -102,10 +113,12 @@ let config = {
mainAPI,
jsonTokenName,
mainFE,
mainRender,
mainIPFS,
mainICO,
footer,
hive_service_fee
hive_service_fee,
features
};
module.exports = config;

44
dao.js
View File

@ -25,6 +25,7 @@ function dao(num) {
});
}),
Pbals = getPathObj(['balances']),
Pcbals = getPathObj(['cbalances'])
Prunners = getPathObj(['runners']),
Pnodes = getPathObj(['markets', 'node']),
Pstats = getPathObj(['stats']),
@ -39,7 +40,7 @@ function dao(num) {
Ppaid = getPathObj(['paid']),
Prnfts = getPathObj(['rnfts']);
Pdistro = Distro()
Promise.all([Pnews, Pbals, Prunners, Pnodes, Pstats, Pdelegations, Pico, Pdex, Pbr, Ppbal, Pnomen, Pposts, Pfeed, Ppaid, Prnfts, Pdistro]).then(function(v) {
Promise.all([Pnews, Pbals, Prunners, Pnodes, Pstats, Pdelegations, Pico, Pdex, Pbr, Ppbal, Pnomen, Pposts, Pfeed, Ppaid, Prnfts, Pdistro, Pcbals]).then(function(v) {
daops.push({ type: 'del', path: ['postQueue'] });
daops.push({ type: 'del', path: ['br'] });
daops.push({ type: 'del', path: ['rolling'] });
@ -47,6 +48,7 @@ function dao(num) {
news = v[0] + '*****\n';
const header = post + news;
var bals = v[1],
cbals = v[16],
runners = v[2],
mnode = v[3],
stats = v[4],
@ -65,7 +67,7 @@ function dao(num) {
if(dist[i][0].split('div:')[1]){
addMT(['div', dist[i][0].split('div:')[1], 'b'], dist[i][1] )
} else {
bals[dist[i][0]] += dist[i][1]
cbals[dist[i][0]] ? cbals[dist[i][0]] += dist[i][1] : cbals[dist[i][0]] = dist[i][1]
}
}
feedKeys = Object.keys(feedCleaner);
@ -144,11 +146,7 @@ function dao(num) {
if(j){
for (var node in mnode) { //and pay them
i = parseInt(mnode[node].wins / j * b);
if (bals[node]) {
bals[node] += i;
} else {
bals[node] = i;
}
cbals[node] ? cbals[node] += i : cbals[node] = i;
bals.rn -= i;
const _at = _atfun(node);
if (i) {
@ -169,16 +167,13 @@ function dao(num) {
}
for (i in deles) { //reward vests
k = parseInt(b * deles[i] / j);
if (bals[i] === undefined) {
bals[i] = 0;
}
bals[i] += k;
cbals[i] ? cbals[i] += k : cbals[i] = k;
bals.rd -= k;
const _at = _atfun(i);
post = post + `* ${parseFloat(parseInt(k) / 1000).toFixed(3)} ${config.TOKEN} for ${_at}${i}'s ${parseFloat(deles[i] / 1000000).toFixed(1)} Mvests.\n`;
console.log(num + `:${k} ${config.TOKEN} awarded to ${i} for ${deles[i]} VESTS`);
}
stats.dluxPerDel = parseFloat(k / j).toFixed(6);
stats[`${config.jsonTokenName}PerDel`] = parseFloat(k / j).toFixed(6);
post = post + `*****\n ## ICO Status\n`;
if (bals.ri < 100000000 && stats.tokenSupply < 100000000000) {
stats.icoRound++;
@ -228,15 +223,12 @@ function dao(num) {
post = post + `### ICO Over Auction Results:\n${parseFloat(bals.rl / 1000).toFixed(3)} ${config.TOKEN} was set aside from today's ICO to divide between people who didn't get a chance at fixed price tokens and donated ${parseFloat(y / 1000).toFixed(3)} HIVE today.\n`;
for (i = 0; i < ico.length; i++) {
for (var node in ico[i]) {
if (!bals[node]) {
bals[node] = 0;
}
bals[node] += parseInt(ico[i][node] / y * bals.rl);
cbals[node] ? cbals[node] += parseInt(ico[i][node] / y * bals.rl) : cbals[node] = parseInt(ico[i][node] / y * bals.rl);
dailyICODistrobution -= parseInt(ico[i][node] / y * bals.rl);
post = post + `* @${node} awarded ${parseFloat(parseInt(ico[i][node] / y * bals.rl) / 1000).toFixed(3)} ${config.TOKEN} for ICO auction\n`;
console.log(num + `:${node} awarded ${parseInt(ico[i][node] / y * bals.rl)} ${config.TOKEN} for ICO auction`);
if (i == ico.length - 1) {
bals[node] += dailyICODistrobution;
cbals[node] ? cbals[node] += dailyICODistrobution : cbals[node] = dailyICODistrobution
post = post + `* @${node} awarded ${parseFloat(parseInt(dailyICODistrobution) / 1000).toFixed(3)} ${config.TOKEN} for ICO auction\n`;
console.log(num + `:${node} given ${dailyICODistrobution} remainder`);
}
@ -271,8 +263,8 @@ function dao(num) {
}
}
if (his.length) {
hi.o = parseFloat(his[0].rate); // open, close, top bottom, dlux, volumepair
hi.c = parseFloat(his[his.length - 1].rate);
hi.o = parseFloat(his[0].price); // open, close, top bottom, dlux, volumepair
hi.c = parseFloat(his[his.length - 1].price);
hi.t = 0;
hi.b = hi.o;
hi.d = 0;
@ -286,15 +278,15 @@ function dao(num) {
}
hi.v += parseInt(his[int].target_vol);
hi.d += parseInt(his[int].amount);
hi.d += parseInt(his[int].base_vol);
}
if (!dex.hive.days)
dex.hive.days = {};
dex.hive.days[num] = hi;
}
if (hisb.length) {
hib.o = parseFloat(hisb[0].rate); // open, close, top bottom, dlux, volumepair
hib.c = parseFloat(hisb[hisb.length - 1].rate);
hib.o = parseFloat(hisb[0].price); // open, close, top bottom, dlux, volumepair
hib.c = parseFloat(hisb[hisb.length - 1].price);
hib.t = 0;
hib.b = hib.o;
hib.v = 0;
@ -307,7 +299,7 @@ function dao(num) {
hib.b = parseFloat(hisb[int].price);
}
hib.v += parseInt(hisb[int].target_vol);
hib.d += parseInt(hisb[int].amount);
hib.d += parseInt(hisb[int].base_vol);
}
if (!dex.hbd.days)
dex.hbd.days = {};
@ -348,8 +340,9 @@ function dao(num) {
var dif = bucket;
for (var j in br[i].post.voters) {
bals[br[i].post.author] += parseInt((br[i].post.voters[j].weight * 2 / q * 3) * compa);
cbals[br[i].post.author] ? cbals[br[i].post.author] += parseInt((br[i].post.voters[j].weight * 2 / q * 3) * compa) : cbals[br[i].post.author] = parseInt((br[i].post.voters[j].weight * 2 / q * 3) * compa);
bucket -= parseInt((br[i].post.voters[j].weight / q * 3) * compa);
bals[br[i].post.voters[j].from] += parseInt((br[i].post.voters[j].weight / q * 3) * compa);
cbals[br[i].post.voters[j].from] ? cbals[br[i].post.voters[j].from] += parseInt((br[i].post.voters[j].weight / q * 3) * compa) : cbals[br[i].post.voters[j].from] = parseInt((br[i].post.voters[j].weight / q * 3) * compa);
bucket -= parseInt((br[i].post.voters[j].weight * 2 / q * 3) * compa);
}
vo.push(br[i].post);
@ -394,7 +387,7 @@ function dao(num) {
cpost[`s/${vo[oo].author}/${vo[oo].permlink}`].b = weight;
hiveVotes = hiveVotes + `* [${vo[oo].title || `${config.TOKEN} Content`}](https://www.${config.mainFE}/@${vo[oo].author}/${vo[oo].permlink}) by @${vo[oo].author} | ${parseFloat(weight / 100).toFixed(2)}% \n`;
}
const footer = `[Visit ${config.mainFE}](https://www.${config.mainFE})\n[Visit our DEX/Wallet](https://www.${config.mainFE}/dex)\n[Learn how to use ${config.TOKEN}](https://github.com/dluxio/dluxio/wiki)\n[Stop @ Mentions - HiveSigner](https://hivesigner.com/sign/custom-json?authority=posting&required_auths=0&id=${config.prefix}nomention&json=%7B%22nomention%22%3Atrue%7D)\n${config.footer}`;
const footer = `[Visit ${config.mainFE}](https://${config.mainFE})\n[Visit our DEX/Wallet](https://${config.mainFE}/dex)\n[Learn how to use ${config.TOKEN}](https://github.com/dluxio/dluxio/wiki)\n[Stop @ Mentions - HiveSigner](https://hivesigner.com/sign/custom-json?authority=posting&required_auths=0&id=${config.prefix}nomention&json=%7B%22nomention%22%3Atrue%7D)\n${config.footer}`;
if (hiveVotes)
hiveVotes = `#### Community Voted ${config.TOKEN} Posts\n` + hiveVotes + `*****\n`;
post = header + contentRewards + hiveVotes + post + footer;
@ -414,6 +407,7 @@ function dao(num) {
daops.push({ type: 'put', path: ['dex'], data: dex });
daops.push({ type: 'put', path: ['stats'], data: stats });
daops.push({ type: 'put', path: ['balances'], data: bals });
daops.push({ type: 'put', path: ['cbalances'], data: cbals });
daops.push({ type: 'put', path: ['posts'], data: cpost });
daops.push({ type: 'put', path: ['markets', 'node'], data: mnode });
daops.push({ type: 'put', path: ['delegations'], data: deles });

View File

@ -25,10 +25,10 @@ exports.contentToDiscord = (author, permlink) => {
.then(result => {
r = result.result
const embed = new MessageBuilder()
.setTitle('New DLUX content!')
.setAuthor(author, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://www.dlux.io/@${author}`)
.setURL(`https://www.dlux.io/dlux/@${author}/${permlink}`)
.addField(r.title, (JSON.parse(r.json_metadata).description || 'View this on dlux.io'), true)
.setTitle(`New ${config.TOKEN} content!`)
.setAuthor(author, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://${config.mainFE}/@${author}`)
.setURL(`https://${config.mainFE}/${config.tag}/@${author}/${permlink}`)
.addField(r.title, (JSON.parse(r.json_metadata).description || `View this on ${config.mainFE}`), true)
//.addField('Second field', 'this is not inline')
.setColor('#00b0f4')
//.setThumbnail('https://cdn.discordapp.com/embed/avatars/0.png')
@ -46,14 +46,14 @@ exports.contentToDiscord = (author, permlink) => {
exports.renderNFTtoDiscord = (script, uid, owner, set) => {
const embed = new MessageBuilder()
.setTitle(`New ${set} NFT minted!`)
.setAuthor(owner, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://www.dlux.io/@${owner}`)
.setURL(`https://www.dlux.io/@${owner}#inventory/`)
.addField(`${set}:${uid}`, 'View this on dlux.io', true)
.setAuthor(owner, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://${config.mainFE}/@${owner}`)
.setURL(`https://${config.mainFE}/@${owner}#inventory/`)
.addField(`${set}:${uid}`, `View this on ${config.mainFE}`, true)
//.addField('Second field', 'this is not inline')
.setColor('#00b0f4')
//.setThumbnail('https://cdn.discordapp.com/embed/avatars/0.png')
//.setDescription('Oh look a description :)')
.setImage(`https://dluxdata.herokuapp.com/render/${script}/${uid}`)
.setImage(`https://${config.mainRender}/render/${script}/${uid}`)
//.setFooter('Hey its a footer', 'https://cdn.discordapp.com/embed/avatars/0.png')
.setTimestamp();

302
index.js
View File

@ -1,12 +1,18 @@
const config = require('./config');
const VERSION = 'v1.1.3r6'
const VERSION = 'v1.2.0'
exports.VERSION = VERSION
exports.exit = exit;
exports.processor = processor;
const hive = require('@hiveio/dhive');
var client = new hive.Client(config.clientURL);
exports.client = client
var block = {
ops:[],
root: '',
prev_root: '',
chain:[]
}
exports.block = block
const args = require('minimist')(process.argv.slice(2));
const express = require('express');
const stringify = require('json-stable-stringify');
@ -123,8 +129,8 @@ var recents = []
//HIVE API CODE
//Start Program Options
//startWith('QmaiS2MmGG1w3Ub914nTbeMiTE6CLppC1FNpeUfCMXWmcT', true) //for testing and replaying 58859101
dynStart(config.follow)
startWith('QmfYdmSKpy1SBR9w6qUpUGNUpfvo2Gezg86bXnsrPznpDg', true) //for testing and replaying 58859101
//dynStart(config.follow)
// API defs
api.use(API.https_redirect);
@ -132,57 +138,65 @@ api.use(cors())
api.get('/', API.root);
api.get('/stats', API.root);
api.get('/coin', API.coin);
api.get('/state', API.state); //Do not recommend having a state dump in a production API
api.get('/dex', API.dex);
api.get('/api/tickers', API.tickers);
api.get('/api/orderbook', API.orderbook);
api.get('/api/orderbook/:ticker_id', API.orderbook);
api.get('/api/pairs', API.pairs);
api.get('/api/historical', API.historical_trades);
api.get('/api/historical/:ticker_id', API.historical_trades);
api.get('/api/recent/:ticker_id', API.chart);
api.get('/@:un', API.user);
api.get('/api/mirrors', API.mirrors);
api.get('/api/coin_detail', API.detail);
api.get('/api/nfts/:user', API.nfts);
api.get('/api/nft/:set/:item', API.item);
api.get('/api/sets', API.sets);
api.get('/api/set/:set', API.set);
api.get('/api/auctions', API.auctions);
api.get('/api/auctions/:set', API.auctions);
api.get('/api/mintauctions', API.mint_auctions);
api.get('/api/mintauctions/:set', API.mint_auctions);
api.get('/api/sales', API.sales);
api.get('/api/sales/:set', API.sales);
api.get('/api/mintsales', API.mint_sales);
api.get('/api/mintsales/:set', API.mint_sales);
api.get('/api/mintsupply', API.mint_supply);
api.get('/api/mintsupply/:set', API.mint_supply);
api.get('/api/pfp/:user', API.official);
api.get('/api/trades/:kind/:user', API.limbo);
api.get('/@:un', API.user);
api.get('/blog/@:un', API.blog);
api.get('/dapps/@:author', API.getAuthorPosts);
api.get('/dapps/@:author/:permlink', API.getPost);
api.get('/new', API.getNewPosts);
api.get('/trending', API.getTrendingPosts);
api.get('/promoted', API.getPromotedPosts);
api.get('/report/:un', API.report); // probably not needed
api.get('/markets', API.markets); //for finding node runner and tasks information
api.get('/posts/:author/:permlink', API.PostAuthorPermlink);
api.get('/posts', API.posts); //votable posts
api.get('/feed', API.feed); //all side-chain transaction in current day
api.get('/runners', API.runners); //list of accounts that determine consensus... will also be the multi-sig accounts
api.get('/queue', API.queue);
api.get('/api/protocol', API.protocol);
api.get('/api/status/:txid', API.status);
api.get('/pending', API.pending); // The transaction signer now can sign multiple actions per block and this is nearly always empty, still good for troubleshooting
// Some HIVE APi is wrapped here to support a stateless frontend built on the cheap with dreamweaver
// None of these functions are required for token functionality and should likely be removed from the community version
api.get('/api/:api_type/:api_call', API.hive_api);
api.get('/hapi/:api_type/:api_call', API.hive_api);
api.get('/getwrap', API.getwrap);
api.get('/getauthorpic/:un', API.getpic);
api.get('/getblog/:un', API.getblog);
if(config.features.state){
api.get('/state', API.state); //Do not recommend having a state dump in a production API
api.get('/pending', API.pending); // The transaction signer now can sign multiple actions per block and this is nearly always empty, still good for troubleshooting
// Some HIVE APi is wrapped here to support a stateless frontend built on the cheap with dreamweaver
// None of these functions are required for token functionality and should likely be removed from the community version
api.get('/api/:api_type/:api_call', API.hive_api);
api.get('/hapi/:api_type/:api_call', API.hive_api);
api.get('/getwrap', API.getwrap);
api.get('/getauthorpic/:un', API.getpic);
api.get('/getblog/:un', API.getblog);
}
if(config.features.dex){
api.get('/dex', API.dex);
api.get('/api/tickers', API.tickers);
api.get('/api/orderbook', API.orderbook);
api.get('/api/orderbook/:ticker_id', API.orderbook);
api.get('/api/pairs', API.pairs);
api.get('/api/historical', API.historical_trades);
api.get('/api/historical/:ticker_id', API.historical_trades);
api.get('/api/recent/:ticker_id', API.chart);
}
if(config.features.nft){
api.get('/api/nfts/:user', API.nfts);
api.get('/api/nft/:set/:item', API.item);
api.get('/api/sets', API.sets);
api.get('/api/set/:set', API.set);
api.get('/api/auctions', API.auctions);
api.get('/api/auctions/:set', API.auctions);
api.get('/api/mintauctions', API.mint_auctions);
api.get('/api/mintauctions/:set', API.mint_auctions);
api.get('/api/sales', API.sales);
api.get('/api/sales/:set', API.sales);
api.get('/api/mintsales', API.mint_sales);
api.get('/api/mintsales/:set', API.mint_sales);
api.get('/api/mintsupply', API.mint_supply);
api.get('/api/mintsupply/:set', API.mint_supply);
api.get('/api/pfp/:user', API.official);
api.get('/api/trades/:kind/:user', API.limbo);
}
if(config.features.pob){
api.get('/blog/@:un', API.blog);
api.get('/dapps/@:author', API.getAuthorPosts);
api.get('/dapps/@:author/:permlink', API.getPost);
api.get('/new', API.getNewPosts);
api.get('/trending', API.getTrendingPosts);
api.get('/promoted', API.getPromotedPosts);
api.get('/posts/:author/:permlink', API.PostAuthorPermlink);
api.get('/posts', API.posts); //votable posts
}
http.listen(config.port, function() {
console.log(`${config.TOKEN} token API listening on port ${config.port}`);
@ -196,56 +210,66 @@ if (config.rta && config.rtp) {
function startApp() {
processor = hiveState(client, hive, startingBlock, 10, config.prefix, streamMode, cycleAPI);
processor.on('send', HR.send);
processor.on('power_up', HR.power_up); // power up tokens for vote power in layer 2 token proof of brain
processor.on('power_down', HR.power_down);
processor.on('power_grant', HR.power_grant);
processor.on('vote_content', HR.vote_content);
processor.on('dex_sell', HR.dex_sell);
processor.on('dex_clear', HR.dex_clear);
processor.on('gov_down', HR.gov_down);
processor.on('gov_up', HR.gov_up);
processor.on('claim', HR.claim);
processor.on('node_add', HR.node_add);
processor.on('node_delete', HR.node_delete);
processor.on('report', HR.report);
processor.on('gov_down', HR.gov_down);
processor.on('gov_up', HR.gov_up);
processor.onOperation('account_update', HR.account_update);
processor.onOperation('comment', HR.comment);
processor.on('queueForDaily', HR.q4d)
processor.on('nomention', HR.nomention)
processor.on('ft_bid', HR.ft_bid)
processor.on('ft_auction', HR.ft_auction)
processor.on('ft_sell_cancel', HR.ft_sell_cancel)
processor.on('ft_buy', HR.ft_buy)
processor.on('ft_sell', HR.ft_sell)
processor.on('ft_escrow_cancel', HR.ft_escrow_cancel)
processor.on('ft_escrow_complete', HR.ft_escrow_complete)
processor.on('ft_escrow', HR.ft_escrow)
processor.on('fts_sell_h', HR.fts_sell_h)
processor.on('fts_sell_hcancel', HR.fts_sell_hcancel)
processor.on('nft_buy', HR.nft_buy)
processor.on('nft_sell', HR.nft_sell)
processor.on('nft_sell_cancel', HR.nft_sell_cancel)
processor.on('ft_transfer', HR.ft_transfer)
processor.on('ft_airdrop', HR.ft_airdrop)
processor.on('nft_transfer', HR.nft_transfer)
processor.on('nft_auction', HR.nft_auction)
processor.on('nft_hauction', HR.nft_hauction)
processor.on('nft_bid', HR.nft_bid)
processor.on('nft_transfer_cancel', HR.nft_transfer_cancel)
processor.on('nft_reserve_transfer', HR.nft_reserve_transfer)
processor.on('nft_reserve_complete', HR.nft_reserve_complete)
processor.on('nft_define', HR.nft_define)
processor.on('nft_add_roy', HR.nft_add_roy)
processor.on('nft_div', HR.nft_div)
processor.on('nft_define_delete', HR.nft_define_delete)
processor.on('nft_melt', HR.nft_delete)
processor.on('nft_mint', HR.nft_mint)
processor.on('nft_pfp', HR.nft_pfp)
processor.onOperation('comment_options', HR.comment_options);
processor.on('cjv', HR.cjv);
processor.on('sig_submit', HR.sig_submit); //dlux is for putting executable programs into IPFS... this is for additional accounts to sign the code as non-malicious
processor.on('cert', HR.cert); // json.cert is an open ended hope to interact with executable posts... unexplored
processor.onOperation('vote', HR.vote) //layer 2 voting
processor.onOperation('transfer', HR.transfer);
processor.onOperation('delegate_vesting_shares', HR.delegate_vesting_shares);
processor.onOperation('comment', HR.comment);
if(config.features.pob){
processor.on('power_up', HR.power_up); // power up tokens for vote power in layer 2 token proof of brain
processor.on('power_down', HR.power_down);
processor.on('power_grant', HR.power_grant);
processor.on('vote_content', HR.vote_content);
processor.onOperation('vote', HR.vote) //layer 2 voting
processor.onOperation('delegate_vesting_shares', HR.delegate_vesting_shares);
processor.onOperation('comment_options', HR.comment_options);
processor.on('cjv', HR.cjv);
processor.on('cert', HR.cert); // json.cert is an open ended hope to interact with executable posts... unexplored
}
if(config.features.dex){
processor.on('dex_sell', HR.dex_sell);
processor.on('dex_clear', HR.dex_clear);
processor.on('sig_submit', HR.sig_submit); //dlux is for putting executable programs into IPFS... this is for additional accounts to sign the code as non-malicious
}
if(config.features.dex || config.features.nft || config.features.ico){
processor.onOperation('transfer', HR.transfer);
}
if(config.features.nft){
processor.on('ft_bid', HR.ft_bid)
processor.on('ft_auction', HR.ft_auction)
processor.on('ft_sell_cancel', HR.ft_sell_cancel)
processor.on('ft_buy', HR.ft_buy)
processor.on('ft_sell', HR.ft_sell)
processor.on('ft_escrow_cancel', HR.ft_escrow_cancel)
processor.on('ft_escrow_complete', HR.ft_escrow_complete)
processor.on('ft_escrow', HR.ft_escrow)
processor.on('fts_sell_h', HR.fts_sell_h)
processor.on('fts_sell_hcancel', HR.fts_sell_hcancel)
processor.on('nft_buy', HR.nft_buy)
processor.on('nft_sell', HR.nft_sell)
processor.on('nft_sell_cancel', HR.nft_sell_cancel)
processor.on('ft_transfer', HR.ft_transfer)
processor.on('ft_airdrop', HR.ft_airdrop)
processor.on('nft_transfer', HR.nft_transfer)
processor.on('nft_auction', HR.nft_auction)
processor.on('nft_hauction', HR.nft_hauction)
processor.on('nft_bid', HR.nft_bid)
processor.on('nft_transfer_cancel', HR.nft_transfer_cancel)
processor.on('nft_reserve_transfer', HR.nft_reserve_transfer)
processor.on('nft_reserve_complete', HR.nft_reserve_complete)
processor.on('nft_define', HR.nft_define)
processor.on('nft_add_roy', HR.nft_add_roy)
processor.on('nft_div', HR.nft_div)
processor.on('nft_define_delete', HR.nft_define_delete)
processor.on('nft_melt', HR.nft_delete)
processor.on('nft_mint', HR.nft_mint)
processor.on('nft_pfp', HR.nft_pfp)
}
//do things in cycles based on block time
processor.onBlock(
function (num, pc, prand, bh) {
@ -430,6 +454,8 @@ function startApp() {
}
if ((num - 20003) % 30240 === 0) { //time for daily magic
promises.push(dao(num))
block.prev_root = block.root
block.root = ''
}
if (num % 100 === 0) {
promises.push(tally(num, plasma, processor.isStreaming()));
@ -443,11 +469,15 @@ function startApp() {
Promise.all(promises).then(()=>resolve(pc))
})
}
if (num % 100 === 1) {
if (num % 100 === 1 && !block.root) {
block.root = 'pending'
block.chain = []
block.ops = []
store.get([], function(err, obj) {
const blockState = Buffer.from(stringify([num, obj]))
ipfsSaveState(num, blockState)
.then(pla => {
block.root = pla.hashLastIBlock
plasma.hashSecIBlock = plasma.hashLastIBlock
plasma.hashLastIBlock = pla.hashLastIBlock
plasma.hashBlock = pla.hashBlock
@ -455,6 +485,17 @@ function startApp() {
.catch(e => { console.log(e) })
})
} else if (num % 100 === 1) {
const blockState = Buffer.from(stringify([num, block]))
block.ops = []
ipfsSaveState(num, blockState)
.then(pla => {
block.chain.push({hash: pla.hashLastIBlock, hive_block: num})
plasma.hashSecIBlock = plasma.hashLastIBlock
plasma.hashLastIBlock = pla.hashLastIBlock
plasma.hashBlock = pla.hashBlock
})
.catch(e => { console.log(e) })
}
if (config.active && processor.isStreaming() ) {
store.get(['escrow', config.username], function(e, a) {
@ -628,24 +669,26 @@ function startWith(hash, second) {
console.log(`${hash} inserted`)
if (hash) {
console.log(`Attempting to start from IPFS save state ${hash}`);
ipfs.cat(hash, (err, file) => {
ipfspromise(hash).then(blockInfo=>{
var blockinfo = JSON.parse(blockInfo);
ipfs.cat(blockinfo[1].root ? blockinfo[1].root : hash, (err, file) => {
if (!err) {
var data = JSON.parse(file);
startingBlock = data[0]
block.root = blockinfo[1].root ? blockinfo[1].root : hash
block.prev_root = data[1].prev_root ? data[1].prev_root : data[1].stats.root
console.log('root', block.root)
if (!startingBlock) {
startWith(sh)
} else {
/*
plasma.hashBlock = data[0]
plasma.hashLastIBlock = hash
*/
store.del([], function(e) {
if (!e && (second || data[0] > API.RAM.head - 325)) {
if (hash) {
var cleanState = data[1]
// cleanState.runners = {
// disrgardfiat: 1,
// ['dlux-io']: 1,
// markegiles: 1
// }
store.put([], cleanState, function(err) {
if (err) {
console.log('errr',err)
@ -660,7 +703,12 @@ function startWith(hash, second) {
}
}
})
startApp()
if(blockinfo[1].chain){rundelta(blockinfo[1].chain, blockinfo[1].ops, blockinfo[0], blockinfo[1].prev_root)
.then(empty=>startApp())
.catch(e=>console.log('Failure of rundelta'))
} else {
startApp()
}
}
})
} else {
@ -680,13 +728,13 @@ function startWith(hash, second) {
for( var runner in data[1].runners) {
promises.push(new Promise((resolve, reject) => {
console.log('runner', runner)
hiveClient.api.setOptions({ url: config.startURL });
hiveClient.api.getAccountHistory(runner, -1, 100, ...walletOperationsBitmask, function(err, result) {
var recents = {block:0}
if (err) {
console.log('error in retrieval')
resolve({hash:null,block:null})
} else {
//hiveClient.api.setOptions({ url: config.clientURL });
let ebus = result.filter(tx => tx[1].op[1].id === `${config.prefix}report`)
for (i = ebus.length - 1; i >= 0; i--) {
if (JSON.parse(ebus[i][1].op[1].json).hash) {
@ -713,6 +761,7 @@ function startWith(hash, second) {
}))
}
Promise.all(promises).then(values =>{
hiveClient.api.setOptions({ url: config.clientURL });
var newest = 0, votes = {}, blocks = {}
for(var acc in values){
if(values[acc].block >= newest && !votes[values[acc].hash]){
@ -742,6 +791,10 @@ function startWith(hash, second) {
console.log(`${hash} failed to load, Replaying from genesis.\nYou may want to set the env var STARTHASH\nFind it at any token API such as ${config.mainAPI}`)
}
});
})
.catch(e=>{
})
} else {
startingBlock = config.starting_block
store.del([], function(e) {
@ -760,4 +813,53 @@ function startWith(hash, second) {
})
})
}
}
// Replay a chain of incremental IPFS state deltas on top of the current store.
//   arr - ordered list of {hash, hive_block} delta checkpoints to fetch and apply
//   ops - serialized trailing ops to apply after all deltas (see unwrapOps)
//   sb  - starting block number to resume processing from once replay completes
//   pr  - previous root hash to record on the shared `block` object
// Resolves with 'OK' after every delta and the trailing ops are written;
// rejects if any IPFS fetch or store batch fails.
// NOTE(review): mutates module-level state (plasma, block, startingBlock) as a
// side effect — callers appear to rely on this; confirm before refactoring.
function rundelta(arr, ops, sb, pr){
return new Promise((resolve, reject) => {
var promises = []
// Kick off all IPFS fetches in parallel; plasma ends up pointing at the
// last checkpoint in the list (loop overwrites it each iteration).
for (var i = 0; i < arr.length; i++){
promises.push(ipfspromise(arr[i].hash))
plasma.hashBlock = arr[i].hive_block
plasma.hashLastIBlock = arr[i].hash
}
Promise.all(promises).then(values => {
delta(values)
// Apply deltas strictly in order: each store.batch completion re-invokes
// delta with the remaining list (store.batch calls pc[0](pc[2]) on success).
function delta(a){
if(a.length){
// Each fetched delta is [blockNum, {ops: [...serialized ops]}].
const b = JSON.parse(a.shift())
startingBlock = b[0]
store.batch(unwrapOps(b[1].ops), [delta, reject, a ? a : []])
} else {
// All deltas applied — reset the in-memory block record and flush
// the trailing ops before resolving.
block.ops = []
block.chain = arr
block.prev_root = pr
startingBlock = sb
store.batch(unwrapOps(ops), [resolve, reject, 'OK'])
}
}
})
.catch(e=>reject(e))
})
}
// Deserialize an array of stringified store operations back into objects.
// Each element of `arr` is a JSON string produced by stringify() when the
// op was recorded; the result preserves order.
function unwrapOps(arr){
return arr.map((op) => JSON.parse(op))
}
// Fetch the content stored at `hash` from IPFS, promisifying the
// callback-style ipfs.cat API.
// Resolves with the raw data returned by ipfs.cat; rejects with the
// underlying error. (Previously rejected with null, which discarded the
// failure reason and made downstream .catch handlers useless for debugging.)
function ipfspromise(hash){
return new Promise((resolve, reject) => {
ipfs.cat(hash, function(err, data) {
if (err) {
console.log(err)
reject(err)
} else {
resolve(data)
}
})
})
}

View File

@ -65,6 +65,22 @@ const add = (node, amount) => {
}
exports.add = add
// Credit `amount` to `node`'s claimable balance ('cbalances'), creating the
// entry if the account has no claimable balance yet (store.get yields a
// non-number for missing keys).
// Resolves once the store write completes.
// Fix: the error branch previously only logged `e` and never settled the
// promise, so any caller awaiting addc() would hang forever on a read error;
// it now rejects with the error.
const addc = (node, amount) => {
return new Promise((resolve, reject) => {
store.get(['cbalances', node], function(e, a) {
if (!e) {
console.log(amount + ' to ' + node)
const a2 = typeof a != 'number' ? amount : a + amount
console.log('final balance ' + a2)
store.batch([{ type: 'put', path: ['cbalances', node], data: a2 }], [resolve, reject, 1])
} else {
console.log(e)
reject(e)
}
})
})
}
exports.addc = addc
const addMT = (path, amount) => {
return new Promise((resolve, reject) => {
store.get(path, function(e, a) {

View File

@ -10,9 +10,10 @@
},
"keywords": [
"dlux",
"steem",
"steem-state",
"token"
"hive",
"nft",
"token",
"dex"
],
"author": "disregardfiat",
"license": "MIT",

View File

@ -5,7 +5,7 @@ var type = require('component-type');
var after = require('after');
var streamToArray = require('stream-to-array');
const stringify = require('json-stable-stringify');
var { block } = require('./index')
module.exports = Pathwise;
function Pathwise(db) {
@ -60,6 +60,7 @@ Pathwise.prototype.batch = function(ops, pc) { // promise chain[resolve(), rejec
}
});
ops.forEach(function(op) {
block.ops.push(stringify({type: op.type, path: op.path, data: op.data}))
if (op.type == 'put') self.put(op.path, op.data, { batch: batch }, next)
else if (op.type == 'del') self.del(op.path, { batch: batch }, next);
});

View File

@ -9,7 +9,7 @@ exports.comment = (json, pc) => {
let meta = {}
try { meta = JSON.parse(json.json_metadata) } catch (e) {}
let community_post = false
if (json.author == config.leader && parseInt(json.permlink.split('dlux')[1]) > json.block_num - 31000) {
if (json.author == config.leader && parseInt(json.permlink.split(config.tag)[1]) > json.block_num - 31000) {
//console.log('leader post')
store.get(['escrow', json.author], function(e, a) {
if (!e) {
@ -25,7 +25,7 @@ exports.comment = (json, pc) => {
console.log(e)
}
})
} else if (meta.arHash || meta.vrHash || meta.appHash || meta.audHash) {
} else if (config.features.pob && meta.arHash || meta.vrHash || meta.appHash || meta.audHash) {
Ppost = getPathObj(['posts', `${json.author}/${json.permlink}`])
Promise.all([Ppost])
.then(postarray => {

View File

@ -206,7 +206,7 @@ exports.dex_sell = (json, from, active, pc) => {
}
exports.transfer = (json, pc) => {
if (json.to == config.mainICO && json.amount.split(' ')[1] == 'HIVE' && json.from != config.msaccount) { //the ICO disribution... should be in multi sig account
if (config.features.ico && json.to == config.mainICO && json.amount.split(' ')[1] == 'HIVE' && json.from != config.msaccount) { //the ICO disribution... should be in multi sig account
const amount = parseInt(parseFloat(json.amount) * 1000)
var purchase,
Pstats = getPathObj(['stats']),
@ -260,7 +260,7 @@ exports.transfer = (json, pc) => {
store.batch(ops, pc)
}
})
} else if (json.to == config.msaccount && json.from != config.mainICO) {
} else if ((config.features.dex || config.features.nft) && json.to == config.msaccount && json.from != config.mainICO) {
if(json.memo.split(' ').length > 1 && json.memo.split(' ')[0] == 'NFT'){
/*
lth[`set:hash`]{
@ -834,7 +834,7 @@ exports.transfer = (json, pc) => {
store.batch(ops, pc)
}
}
} else if (json.from == config.msaccount){
} else if (config.features.dex && json.from == config.msaccount){
getPathObj(['mss']).then(mss => {
var done = false
for (var block in mss){

View File

@ -1,11 +1,11 @@
const { onStreamingStart } = require('./onStreamingStart')
const { send } = require('./send')
const { send, claim } = require('./send')
const { gov_up, gov_down } = require('./gov')
const { power_up, power_down, power_grant } = require('./power')
const { delegate_vesting_shares } = require('./delegate_vesting_shares')
const { vote } = require('./vote')
const { cert } = require('./cert')
const { sig_submit } = require('./sig')
const { sig_submit, account_update } = require('./sig')
const { cjv } = require('./cjv')
const { nomention } = require('./nomention')
const { q4d } = require('./q4d')
@ -76,6 +76,7 @@ module.exports = {
cjv,
comment,
comment_options,
account_update,
delegate_vesting_shares,
dex_clear,
dex_sell,
@ -91,6 +92,7 @@ module.exports = {
q4d,
report,
send,
claim,
sig_submit,
transfer,
vote

View File

@ -1,85 +0,0 @@
//multi-sig ops
/*
processor.onOperation('account_update', function(json, pc) { //ensure proper keys are on record for DAO accounts
let agentsP = getPathObj(['agents']),
statsP = getPathObj(['stats']),
keysP = getPathObj(['keyPairs'])
Promise.all([agentsP, statsP, keysP])
.then(a => {
let agents = a[0],
stats = a[1],
keyPairs = a[2],
ops = []
if (json.account == config.msaccount) {
stats.auths = {}
for (var agent in agents) {
agents[agent].o = 0
}
for (var i = 0; i < json.owner.key_auths.length; i++) {
stats.auth[json.owner.key_auths[i][0]] = 1
agents[keyPairs[json.owner.key_auths[i][0]]].o = 1
}
//auto update active public keys
ops.push({ type: 'put', path: ['stats'], data: stats })
ops.push({ type: 'put', path: ['agents'], data: agents })
console.log(ops);
store.batch(ops, pc)
} else if (agents[json.account] != null && json.active != null) {
ops.push({ type: 'put', path: ['agents', json.account, 'p'], data: json.active.key_auths[0][0] }) //keep record of public keys of agents
ops.push({ type: 'put', path: ['keyPairs', json.active.key_auths[0][0]], data: json.account })
console.log(ops);
store.batch(ops, pc)
} else {
pc[0](pc[2])
}
})
.catch(e => { console.log(e) })
});
processor.onOperation('claim_account', function(json, pc) {
getPathObj(['agents', json.creator])
.then(re => {
let r = re,
ops = []
if (Object.keys(r).length) { //adjust inventories
r.i++
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `${json.creator} claimed an ACT` })
console.log({ msg: 'claim', ops });
store.batch(ops, pc)
} else {
pc[0](pc[2])
}
})
.catch(e => { console.log(e) })
});
processor.onOperation('create_claimed_account', function(json, pc) {
let agentP = getPathObj(['agents', json.creator]),
conP = getPathObj(['contracts', json.creator, json.new_account_name + ':c'])
Promise.all([agentP, conP])
.then(a => {
let r = a[0],
con = a[1],
ops = []
if (Object.keys(r).length) { //adjust inventories
r.i--
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${json.creator} redeemed DACT for ${json.new_account_name}` })
console.log(ops, 'adjust ACT inventories'); //needs more work
}
if (Object.keys(con).length) { //if a contract account --ACT options ---probably too advanced to build and test together
if (con[`${json.new_account_name}:c`] != null) {
r.ir++ //update redeemed total
//state.bot.push(con[`${json.new_account_name}:c`]) //push payment to multisig bot build a thing for this
ops.push({ type: 'del', path: ['contracts', json.creator, json.new_account_name + ':c'] })
}
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
console.log(ops, 'create'); //needs more work
store.batch(ops, pc)
}
})
.catch(e => { console.log(e) })
});
*/

View File

@ -241,7 +241,7 @@ json:nft_define: {
}
*/
exports.nft_define = function(json, from, active, pc) {
if (active && (from == 'disregardfiat' || from == 'hivefolks')){
if (active){
let statsp = getPathObj(['stats']),
balp = getPathObj(['balances']),
setp = getPathObj(['sets', json.name])

View File

@ -37,4 +37,36 @@ exports.send = (json, from, active, pc) => {
store.batch(ops, pc);
})
.catch(e => { console.log(e); });
}
exports.claim = (json, from, active, pc) => {
let fbalp = getPathNum(['cbalances', from]),
tbp = getPathNum(['balances', from]),
splitp = getPathNum([json.gov ? 'gov': 'pow', from]),
totp = getPathNum([json.gov ? 'gov': 'pow', 't']);
Promise.all([fbalp, tbp, splitp, totp])
.then(bals => {
let fbal = bals[0],
tbal = bals[1],
split = bals[2],
tot = bals[3],
ops = [],
claim = parseInt(fbal);
if (claim > 0) {
const half = parseInt(claim / 2),
other = claim - half,
msg = `@${from}| Claimed ${parseFloat(parseInt(claim) / 1000).toFixed(3)} ${config.TOKEN} - Half ${json.gov ? 'locked in gov': 'powered up.'}`
ops.push({ type: 'del', path: ['cbalances', from] });
ops.push({ type: 'put', path: ['balances', from], data: parseInt(tbal + half) });
ops.push({ type: 'put', path: [json.gov ? 'gov': 'pow', from], data: parseInt(split + other) });
ops.push({ type: 'put', path: [json.gov ? 'gov': 'pow', 't'], data: parseInt(tot + other) });
if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
} else {
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid claim operation` });
}
if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
store.batch(ops, pc);
})
.catch(e => { console.log(e); });
}

View File

@ -7,32 +7,77 @@ const { chronAssign } = require('./../lil_ops')
const { verify_broadcast } = require('./../tally')
exports.account_update = (json, pc) => {
Pnode = getPathObj(['makerts', 'node', json.account])
Promise.all([Pnode])
.then(r => {
node = r[0]
if (Object.keys(node).length) {
if (JSON.parse(json.json_metadata).dluxPubKey) {
node.pubKey_to_verify = JSON.parse(json.json_metadata).dluxPubKey
} else if (json.active && json.active.key_auths.length == 1) {
node.pubKey_to_verify = json.active.key_auths[0][0]
} else {
pc[0](pc[2])
if(json.account == config.msaccount){
var ops = [{type:'del', path:['stats', 'ms']}]
if(json.active) {
let account_auths = {}
for (var i = 0; i < json.active.account_auths.length;i++){
account_auths[json.active.account_auths[i][0]] = json.active.account_auths[i][1]
}
ops.push({type:'put', path:['stats', 'ms', 'active_account_auths'], data: account_auths})
if(json.active.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'active_threshold'], data: json.active.weight_threshold})
}
if(json.owner) {
let owner_key_auths = {}
for (var i = 0; i < json.owner.owner_key_auths.length;i++){
owner_key_auths[json.owner.owner_key_auths[i][0]] = json.owner.owner_key_auths[i][1]
}
ops.push({type:'put', path:['stats', 'ms', 'owner_key_auths'], data: owner_key_auths})
if(json.owner.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'owner_threshold'], data: json.owner.weight_threshold})
}
if(json.posting) {
let paccount_auths = {}
for (var i = 0; i < json.posting.account_auths.length;i++){
paccount_auths[json.posting.account_auths[i][0]] = json.posting.account_auths[i][1]
}
ops.push({type:'put', path:['stats', 'ms', 'active_account_auths'], data: paccount_auths})
if(json.posting.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'posting_threshold'], data: json.posting.weight_threshold})
}
if(json.memo_key) ops.push({type:'put', path:['stats', 'ms', 'memo_key'], data: json.memo_key})
getPathObj(['mss']).then(mss => {
var done = false
for (var block in mss){
if([block].indexOf('account_update') > 0){
ops.push({type:'del', path:['mss', `${block}`]})
ops.push({type:'del', path:['mss', `${block}:sigs`]})
store.batch(ops, pc)
done = true
}
let ops = [{ type: 'put', path: ['makerts', 'node', json.account], data: node }]
}
if (!done) {
store.batch(ops, pc)
} else if (json.account == config.msaccount) {
deleteObjs(['ms', 'ops', `account_update:${JSON.parse(json.json_metadata).id}`])
.then(empty => pc[0](pc[2]))
} else {
pc[0](pc[2])
}
})
.catch(e => {
pc[0](pc[2])
})
} else {
pc[0](pc[2])
}
}
/*
"ms": {
"account": "dlux-cc",
"active_account_auths": {
"disregardfiat": 1,
"dlux-io": 1,
"markegiles": 1
},
"active_threshold": 2,
"memo_key": "STM5se9o2oZwY7ztpo2scyvf12RR41zaYa6rozBtetwfr1DmH1J5k",
"owner_key_auths": {
"STM5Rp1fWQMS7tAPVqatg8B22faeJGcKkfsez3mgUwGZPE9aqWd6X": 1,
"STM7Hgi4pjf5e7u6oKLdhWfgForEVikzvpkK5ejdaMzAzH6dWAtAD": 1,
"STM8TPTJXiCbGaEhAheXxQqbX4isq3UWiPuQBnHLmCKpmmNXhu31m": 1
},
"owner_threshold": 2,
"posting_account_auths": {
"disregardfiat": 1,
"dlux-io": 1,
"markegiles": 1
},
"posting_threshold": 1
}
*/
exports.sig_submit = (json, from, active, pc) => {
var Pop = getPathObj(['mss', `${json.sig_block}`]),
Psigs = getPathObj(['mss', `${json.sig_block}:sigs`]),
@ -63,4 +108,89 @@ exports.sig_submit = (json, from, active, pc) => {
}
})
.catch(e => { console.log(e); });
}
}
/*
processor.onOperation('account_update', function(json, pc) { //ensure proper keys are on record for DAO accounts
let agentsP = getPathObj(['agents']),
statsP = getPathObj(['stats']),
keysP = getPathObj(['keyPairs'])
Promise.all([agentsP, statsP, keysP])
.then(a => {
let agents = a[0],
stats = a[1],
keyPairs = a[2],
ops = []
if (json.account == config.msaccount) {
stats.auths = {}
for (var agent in agents) {
agents[agent].o = 0
}
for (var i = 0; i < json.owner.key_auths.length; i++) {
stats.auth[json.owner.key_auths[i][0]] = 1
agents[keyPairs[json.owner.key_auths[i][0]]].o = 1
}
//auto update active public keys
ops.push({ type: 'put', path: ['stats'], data: stats })
ops.push({ type: 'put', path: ['agents'], data: agents })
console.log(ops);
store.batch(ops, pc)
} else if (agents[json.account] != null && json.active != null) {
ops.push({ type: 'put', path: ['agents', json.account, 'p'], data: json.active.key_auths[0][0] }) //keep record of public keys of agents
ops.push({ type: 'put', path: ['keyPairs', json.active.key_auths[0][0]], data: json.account })
console.log(ops);
store.batch(ops, pc)
} else {
pc[0](pc[2])
}
})
.catch(e => { console.log(e) })
});
processor.onOperation('claim_account', function(json, pc) {
getPathObj(['agents', json.creator])
.then(re => {
let r = re,
ops = []
if (Object.keys(r).length) { //adjust inventories
r.i++
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `${json.creator} claimed an ACT` })
console.log({ msg: 'claim', ops });
store.batch(ops, pc)
} else {
pc[0](pc[2])
}
})
.catch(e => { console.log(e) })
});
processor.onOperation('create_claimed_account', function(json, pc) {
let agentP = getPathObj(['agents', json.creator]),
conP = getPathObj(['contracts', json.creator, json.new_account_name + ':c'])
Promise.all([agentP, conP])
.then(a => {
let r = a[0],
con = a[1],
ops = []
if (Object.keys(r).length) { //adjust inventories
r.i--
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${json.creator} redeemed DACT for ${json.new_account_name}` })
console.log(ops, 'adjust ACT inventories'); //needs more work
}
if (Object.keys(con).length) { //if a contract account --ACT options ---probably too advanced to build and test together
if (con[`${json.new_account_name}:c`] != null) {
r.ir++ //update redeemed total
//state.bot.push(con[`${json.new_account_name}:c`]) //push payment to multisig bot build a thing for this
ops.push({ type: 'del', path: ['contracts', json.creator, json.new_account_name + ':c'] })
}
ops.push({ type: 'put', path: ['agents', json.creator], data: r })
console.log(ops, 'create'); //needs more work
store.batch(ops, pc)
}
})
.catch(e => { console.log(e) })
});
*/

View File

@ -1,6 +1,6 @@
const fetch = require('node-fetch');
const { Base64 } = require('./helpers');
module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpeed = 1000, prefix = '', mode = 'latest', cycleapi) {
module.exports = function(client, hive, currentBlockNumber = 1, blockComputeSpeed = 1000, prefix = '', mode = 'latest', cycleapi) {
var onCustomJsonOperation = {}; // Stores the function to be run for each operation id.
var onOperation = {};
@ -52,24 +52,25 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
if (currentBlockNumber >= result) {
callback(true);
} else {
callback(false);
callback(result - currentBlockNumber);
}
})
}
function beginBlockComputing() {
function computeBlock() {
function computeBlock(behind) {
var blockNum = currentBlockNumber; // Helper variable to prevent race condition
// in getBlock()
var vops = getVops(blockNum)
function getBlock(bn){
return new Promise ((resolve, reject)=>{
gb(bn, 0)
if(behind)gbr(bn, behind > 100 ? 100 : behind, 0)
else gb(bn, 0)
function gb (bln, at){
client.database.getBlock(bln)
.then((result) => {
resolve(result)
resolve([result])
})
.catch((err) => {
if (at < 3){
@ -79,21 +80,72 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
}
})
}
// Range fetcher for fast replay: pulls up to `count` blocks starting at `bln`
// via the block_api.get_block_range RPC, then normalizes each block's
// transactions to the same shape the single-block path (gb) produces:
// block_num / transaction_id / transaction_num stamped onto each transaction,
// and each operation converted from {type:'x_operation', value} to ['x', value].
// Resolves the enclosing getBlock() promise with an ARRAY of blocks.
// `at` is the retry counter shared with gb's convention (give up at 3).
function gbr (bln, count, at){
fetch("https://api.hive.blog", {
body: `{"jsonrpc":"2.0", "method":"block_api.get_block_range", "params":{"starting_block_num": ${bln}, "count": ${count}}, "id":1}`,
headers: {
"Content-Type": "application/x-www-form-urlencoded"
},
method: "POST"
})
.then(res => res.json())
.then((result) => {
var blocks =result.result.blocks
for (var i = 0; i < blocks.length; i++){
// block_id's first 8 hex chars encode the block number.
const bkn = parseInt(blocks[i].block_id.slice(0, 8), 16);
for (var j = 0; j < blocks[i].transactions.length; j++){
blocks[i].transactions[j].block_num = bkn
blocks[i].transactions[j].transaction_id = blocks[i].transaction_ids[j]
blocks[i].transactions[j].transaction_num = j
var ops = []
for(var k = 0; k < blocks[i].transactions[j].operations.length; k++){
// get_block_range returns typed ops; flatten to [name, value] pairs.
ops.push([blocks[i].transactions[j].operations[k].type.replace('_operation', ''), blocks[i].transactions[j].operations[k].value])
}
blocks[i].transactions[j].operations = ops
}
}
resolve(blocks)
})
.catch((err) => {
if (at < 3){
// NOTE(review): on failure this falls back to the single-block fetch
// gb(bn, at) rather than retrying gbr, and `at` is NOT incremented --
// confirm whether the fallback is intentional or this should be
// gbr(bln, count, at + 1).
gb(bn,at)
} else {
reject(err)
}
})
}
})
}
getBlock(blockNum)
.then((result) => {
processBlock(result, blockNum, vops)
pl(result)
function pl (range){
pb(range.shift(), range.length)
.then(res =>{
if(res == 'NEXT'){
blockNum++
pl(range)
}
})
}
function pb(bl, remaining) {
return new Promise((resolve, reject) => {
processBlock(bl, blockNum, vops)
.then(r => {
currentBlockNumber++;
if (!stopping) {
if (!stopping && !remaining) {
isAtRealTime(function(result) {
if (!result) {
setTimeout(computeBlock, blockComputeSpeed);
} else {
if (result === true) {
beginBlockStreaming();
//setTimeout(computeBlock, blockComputeSpeed);
} else {
//computeBlock()
computeBlock(result)
resolve('DONE')
}
})
} else if (remaining){
resolve('NEXT')
} else {
console.log('failed at stopping')
//setTimeout(stopCallback, 1000);
@ -101,6 +153,8 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
}
})
.catch(e => { console.log('failed at catch:', e) })
})
}
})
.catch((err) => {
console.log('get block catch:' + err)
@ -115,7 +169,7 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
isStreaming = true;
onStreamingStart();
if (mode === 'latest') {
stream = client.blockchain.getBlockStream({ mode: steem.BlockchainMode.Latest });
stream = client.blockchain.getBlockStream({ mode: hive.BlockchainMode.Latest });
} else {
stream = client.blockchain.getBlockStream();
}
@ -140,7 +194,6 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
function transactional(ops, i, pc, num, block, vops) {
if (ops.length) {
doOp(ops[i], [ops, i, pc, num, block, vops])
.then(v => {
if (ops.length > i + 1) {
@ -226,11 +279,13 @@ module.exports = function(client, steem, currentBlockNumber = 1, blockComputeSpe
function processBlock(block, num, Pvops) {
return new Promise((resolve, reject) => {
var transactions = block.transactions;
//console.log(transactions[0])
let ops = []
for (var i = 0; i < transactions.length; i++) {
for (var j = 0; j < transactions[i].operations.length; j++) {
var op = transactions[i].operations[j];
if (op[0] === 'custom_json') {
//console.log('check')
if (typeof onCustomJsonOperation[op[1].id] === 'function') {
var ip = JSON.parse(op[1].json),
from = op[1].required_posting_auths[0],

View File

@ -602,18 +602,9 @@ exports.detail = (req, res, next) => {
Promise.all([stats, hiveStats])
.then(function(v) {
console.log(RAM.hiveDyn)
const DLUX = {
name: 'Decentralized Limitless User eXperiences',
symbol: 'DLUX',
icon: 'https://www.dlux.io/img/dlux-hive-logo-alpha.svg',
supply:'5% Fixed Inflation, No Cap.',
incirc: parseFloat(v[0].tokenSupply / 1000).toFixed(3),
wp:`https://docs.google.com/document/d/1_jHIJsX0BRa5ujX0s-CQg3UoQC2CBW4wooP2lSSh3n0/edit?usp=sharing`,
ws:`https://www.dlux.io`,
be:`https://hiveblockexplorer.com/`,
text: `DLUX is a Web3.0 technology that is focused on providing distribution of eXtended (Virtual and Augmented) Reality. It supports any browser based applications that can be statically delivered through IPFS. The DLUX Token Architecture is Proof of Stake as a layer 2 technology on the HIVE blockchain to take advantage of free transactions. With the first WYSIWYG VR Builder of any blockchain environment and the first Decentralized Exchange on the Hive Blockchain, DLUX is committed to breaking any boundaries for adoption of world changing technologies.`
},
HIVE ={
var TOKEN = config.detail
TOKEN.incirc = parseFloat(v[0].tokenSupply / 1000).toFixed(3)
const HIVE ={
name: 'HIVE',
symbol: 'HIVE',
icon: 'https://www.dlux.io/img/hextacular.svg',
@ -637,7 +628,7 @@ exports.detail = (req, res, next) => {
}
res.send(JSON.stringify({
coins: [DLUX,HIVE,HBD],
coins: [TOKEN,HIVE,HBD],
node: config.username,
behind: RAM.behind,
VERSION
@ -1609,6 +1600,11 @@ exports.coincheck = (state) => {
supply += state.balances[bal]
lbal += state.balances[bal]
}
cbal = 0
for (bal in state.cbalances) {
supply += state.cbalances[bal]
cbal += state.cbalances[bal]
}
var gov = 0,
govt = 0
var con = 0
@ -1675,8 +1671,8 @@ exports.coincheck = (state) => {
in_auctions: ah,
in_market: am,
in_NFTS: bond,
in_dividends: div
in_dividends: div,
in_claims: cbal
}
}
return {check, info, supply}
@ -1700,6 +1696,8 @@ exports.coin = (req, res, next) => {
exports.user = (req, res, next) => {
let un = req.params.un,
bal = getPathNum(['balances', un]),
cbal = getPathNum(['cbalances', un]),
claims = getPathObj(['claims', un]),
pb = getPathNum(['pow', un]),
lp = getPathNum(['granted', un, 't']),
lg = getPathNum(['granting', un, 't']),
@ -1709,12 +1707,21 @@ exports.user = (req, res, next) => {
pup = getPathObj(['up', un]),
pdown = getPathObj(['down', un])
res.setHeader('Content-Type', 'application/json');
Promise.all([bal, pb, lp, contracts, incol, gp, pup, pdown, lg])
Promise.all([bal, pb, lp, contracts, incol, gp, pup, pdown, lg, cbal, claims])
.then(function(v) {
var arr = []
for (var i in v[3]) {arr.push(v[3][i])}
res.send(JSON.stringify({
balance: v[0],
claim: v[9],
drop: {
availible: {
"amount": 10000,
"precision": 3,
"token": "LARYNX"
},
last_claim: 12
},//v[10],
poweredUp: v[1],
granted: v[2],
granting: v[8],

View File

@ -94,6 +94,7 @@ exports.tally = (num, plasma, isStreaming) => {
if (consensus) {
stats.hashLastIBlock = consensus;
stats.lastIBlock = num - 100
let counting_array = []
for (node in tally.agreements.hashes) {
if (tally.agreements.hashes[node] == consensus) {
new_queue[node] = {
@ -101,14 +102,12 @@ exports.tally = (num, plasma, isStreaming) => {
api: nodes[node].domain,
l: nodes[node].liquidity || 100
}
counting_array.push(new_queue[node].g)
}
}
let counting_array = []
for (node in new_queue) {
if (runners.hasOwnProperty(node)) {
still_running[node] = new_queue[node]
counting_array.push(new_queue[node].g)
} else {
election[node] = new_queue[node]
}
@ -117,11 +116,21 @@ exports.tally = (num, plasma, isStreaming) => {
//minimum to outweigh large initial stake holders
//adjust size of runners group based on stake
let low_sum = 0,
last_bal = 0
counting_array.sort((a, b) => a - b)
for (i = 0; i < parseInt(counting_array.length / 2) + 1; i++) {
low_sum += counting_array[i]
last_bal = counting_array[i]
last_bal = 0,
highest_low_sum = 0,
optimal_number = 0
counting_array.sort((a, b) => b - a)
for (var j = 9; j < counting_array.length || j == 25; j++) {
low_sum = 0
for (i = parseInt(j / 2) + 1; i < j; i++) {
low_sum += counting_array[i]
last_bal = counting_array[i]
}
if (low_sum > highest_low_sum) {
highest_low_sum = low_sum
optimal_number = j
stats.gov_threshhold = last_bal
}
}
if (Object.keys(still_running).length < 25) {
let winner = {
@ -147,7 +156,7 @@ exports.tally = (num, plasma, isStreaming) => {
let collateral = []
let liq_rewards = []
for (node in still_running) {
collateral.push(still_running[node].t)
collateral.push(still_running[node].g)
liq_rewards.push(still_running[node].l || 100)
}
let liq_rewards_sum = 0