Changes done in 'origin/develop' specific to 1.27 (HAF-based hivemind)

Bartek Wrona 2022-12-20 07:26:30 +01:00
commit 3bfe4820fb
174 changed files with 4045 additions and 5334 deletions


@ -54,3 +54,5 @@ local.db
*.db
envfile
/deploy
Dockerfile


@ -1,394 +1,234 @@
stages:
- build
- test
- sync-e2e-benchmark
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
- collector
.setup-pip: &setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip install pyyaml
- pip install .[tests]
.setup-runner-env: &setup-runner-env
# Setup runner environment (to connect to correct postgres server, mainly).
- TMP_VAR=$(cat hive-sync-runner-id.txt 2>/dev/null || true); export HIVE_SYNC_RUNNER_ID=${TMP_VAR:-0}
- eval $(cat "$RUNNER_CONF" | ./scripts/ci/setup_env.py --current-runner-id=${CI_RUNNER_ID} --hive-sync-runner-id=${HIVE_SYNC_RUNNER_ID})
- export RUNNER_POSTGRES_HOST=172.17.0.1
- export RUNNER_POSTGRES_PORT=25432
- echo "Postgres host $RUNNER_POSTGRES_HOST port $RUNNER_POSTGRES_PORT"
include:
- project: hive/haf
ref: develop
file: /scripts/ci-helpers/prepare_data_image_job.yml
.set-variables: &set-variables
# - export # List all variables and their values set by GitLab CI.
- whoami
- echo "CI_RUNNER_ID is $CI_RUNNER_ID"
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
.fetch-git-tags: &fetch-git-tags
#- git fetch --tags # Looks to be unnecessary.
- scripts/ci/fix_ci_tag.sh
#- echo git tag -f ci_implicit_tag # Needed to build python package
variables:
# HIVEMIND
RUNNER_HIVEMIND_SERVER_HTTP_PORT: 8080
# HAF
HAF_POSTGRES_URL: postgresql://haf_app_admin@haf-instance:5432/haf_block_log
HAF_ADMIN_POSTGRES_URL: postgresql://haf_admin@haf-instance:5432/haf_block_log
# FF:
FF_NETWORK_PER_BUILD: 1
# GIT:
GIT_DEPTH: 1
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
workflow:
# do not create the pipeline twice when a merge request is open on the current branch
rules:
- if: '$CI_PIPELINE_SOURCE == "web"'
- if: '$CI_MERGE_REQUEST_ID'
- if: '$CI_OPEN_MERGE_REQUESTS'
when: never
- when: always
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>| ANCHORS |>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
.shared_tags:
tags: &shared_tags
- public-runner-docker
- hived-for-tests
.start-timer: &start-timer
- ./scripts/ci/timer.sh start
.stop-timer: &stop-timer
.check-timer: &check-timer
- ./scripts/ci/timer.sh check
.hive-sync-script-common: &hive-sync-script-common
- echo "${CI_RUNNER_ID}" > hive-sync-runner-id.txt
- ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
- export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
- ./scripts/ci/drop-db.sh
- ./scripts/ci/create-db.sh
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
#<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| ANCHORS |<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
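The sync anchor above relies on `scripts/ci/wait-for-postgres.sh` to block until the database accepts connections. As a rough sketch of such a wait step (not the script's actual contents), `pg_isready` from postgresql-client can be polled:
```bash
#!/bin/bash
# Minimal wait loop; host and port are assumed to be passed as arguments.
host="${1:?host required}"
port="${2:?port required}"
until pg_isready -h "$host" -p "$port" -t 3; do
  echo "Waiting for postgres at $host:$port..."
  sleep 1
done
echo "Postgres at $host:$port is ready."
```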
.rules-for-sync: &rules-for-sync
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: manual
.rules-for-test: &rules-for-test
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: on_success
- if: '$CI_PIPELINE_SOURCE == "push"'
when: on_success
- when: on_success
.default:
image: $CI_REGISTRY_IMAGE/ci_base_image:3.8
interruptible: true
inherit:
default: false
variables: false
variables:
GIT_DEPTH: 1
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
PIPENV_VENV_IN_PROJECT: 1
PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}"
cache: &global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
# key: "$CI_COMMIT_REF_NAME"
# Per project caching use any key.
# Change this key, if you need to clear cache.
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- *setup-runner-env
after_script:
- *stop-timer
##### Jobs #####
.hivemind-sync:
# Postgres shared on host.
extends: .default
<<: *rules-for-sync
stage: data-supply
needs: []
script:
- *hive-sync-script-common
artifacts:
paths:
- hivemind-sync.log
- pg-stats
- hive-sync-runner-id.txt
expire_in: 7 days
tags:
- hivemind-heavy-job
.test-common:
extends: .default
<<: *rules-for-test
needs:
- job: hivemind-sync
artifacts: true
allow_failure: false
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- *setup-runner-env
- ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
- ./scripts/ci/hive-server.sh start
after_script:
- *stop-timer
tags:
- hivemind-light-job
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>| BASH SCRIPTS |>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
.bridge_api_smoketest-script: &bridge_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
bridge_api_patterns/ api_smoketest_bridge.xml \
$RUNNER_TEST_JOBS
.bridge_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *bridge_api_smoketest-script
artifacts:
when: always
reports:
junit: api_smoketest_bridge.xml
.bridge_api_smoketest_negative-script: &bridge_api_smoketest_negative-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
bridge_api_negative/ api_smoketest_bridge_negative.xml \
$RUNNER_TEST_JOBS
.bridge_api_smoketest_negative:
stage: e2e-test
extends: .test-common
script:
- *bridge_api_smoketest_negative-script
artifacts:
when: always
reports:
junit: api_smoketest_bridge_negative.xml
.condenser_api_smoketest-script: &condenser_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
condenser_api_patterns/ api_smoketest_condenser_api.xml \
$RUNNER_TEST_JOBS
.condenser_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *condenser_api_smoketest-script
artifacts:
when: always
reports:
junit: api_smoketest_condenser_api.xml
.condenser_api_smoketest_negative-script: &condenser_api_smoketest_negative-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
condenser_api_negative/ api_smoketest_condenser_api_negative.xml \
$RUNNER_TEST_JOBS
.condenser_api_smoketest_negative:
stage: e2e-test
extends: .test-common
script:
- *condenser_api_smoketest_negative-script
artifacts:
when: always
reports:
junit: api_smoketest_condenser_api_negative.xml
.database_api_smoketest-script: &database_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
database_api_patterns/ api_smoketest_database_api.xml \
$RUNNER_TEST_JOBS
.database_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *database_api_smoketest-script
artifacts:
when: always
reports:
junit: api_smoketest_database_api.xml
.database_api_smoketest_negative-script: &database_api_smoketest_negative-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
database_api_negative/ api_smoketest_database_api_negative.xml \
$RUNNER_TEST_JOBS
localhost \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
database_api_negative/ \
api_smoketest_database_api_negative.xml \
$RUNNER_PYTEST_WORKERS
.database_api_smoketest_negative:
stage: e2e-test
extends: .test-common
script:
- *database_api_smoketest_negative-script
artifacts:
when: always
reports:
junit: api_smoketest_database_api_negative.xml
.follow_api_smoketest-script: &follow_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
follow_api_patterns/ api_smoketest_follow_api.xml \
$RUNNER_TEST_JOBS
.follow_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *follow_api_smoketest-script
artifacts:
when: always
reports:
junit: api_smoketest.xml
.follow_api_smoketest_negative-script: &follow_api_smoketest_negative-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
follow_api_negative/ api_smoketest_follow_api_negative.xml \
$RUNNER_TEST_JOBS
.follow_api_smoketest_negative:
stage: e2e-test
extends: .test-common
script:
- *follow_api_smoketest_negative-script
artifacts:
when: always
reports:
junit: api_smoketest_follow_api_negative.xml
.tags_api_smoketest-script: &tags_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
tags_api_negative/ api_smoketest_tags_api_negative.xml \
$RUNNER_TEST_JOBS
.tags_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *tags_api_smoketest-script
artifacts:
when: always
reports:
junit: api_smoketest_tags_api_negative.xml
.tags_api_smoketest_negative-script: &tags_api_smoketest_negative-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
tags_api_patterns/ api_smoketest_tags_api.xml \
$RUNNER_TEST_JOBS
.tags_api_smoketest_negative:
stage: e2e-test
extends: .test-common
script:
- *tags_api_smoketest_negative-script
artifacts:
when: always
reports:
junit: api_smoketest_tags_api.xml
.mock_tests-script: &mock_tests-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
mock_tests/ \
api_smoketest_mock_tests.xml \
$RUNNER_PYTEST_WORKERS
.mock_tests:
stage: e2e-test
extends: .test-common
script:
- *mock_tests-script
artifacts:
reports:
junit: api_smoketest_mock_tests.xml
.hive_api_smoketest-script: &hive_api_smoketest-script
- |
./scripts/ci/start-api-smoketest.sh \
localhost \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
hive_api_patterns/ \
api_smoketest_hive_api.xml \
$RUNNER_PYTEST_WORKERS
.hive_api_smoketest:
stage: e2e-test
extends: .test-common
script:
- *hive_api_smoketest-script
artifacts:
reports:
junit: api_smoketest_hive_api.xml
.api-benchmark-script: &api-benchmark-script
- |
./scripts/ci/start-api-benchmarks.sh \
localhost \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
$RUNNER_BENCHMARK_ITERATIONS \
$RUNNER_PYTEST_WORKERS
.api-benchmark:
stage: benchmark-tests
extends: .test-common
# Temporarily allowed to fail (any call taking longer than 1s is currently tolerated)
allow_failure: true
script:
- *api-benchmark-script
artifacts:
when: always
paths:
- tavern_benchmarks_report.html
#<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| BASH SCRIPTS |<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>| JOBS |>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
prepare_haf_image:
stage: build
extends: .prepare_haf_data_5m_image
variables:
SUBMODULE_DIR: "$CI_PROJECT_DIR/haf"
REGISTRY_USER: "$HAF_IMG_BUILDER_USER"
REGISTRY_PASS: "$HAF_IMG_BUILDER_PASSWORD"
tags: *shared_tags
sync_e2e_benchmark:
image: $CI_REGISTRY_IMAGE/ci_base_image:3.8
stage: sync-e2e-benchmark
needs: [ 'prepare_haf_image' ]
when: on_success
services:
- name: $HAF_IMAGE_NAME
alias: haf-instance
variables:
PG_ACCESS: "
host haf_block_log haf_app_admin 0.0.0.0/0 trust\n
host haf_block_log haf_admin 0.0.0.0/0 trust\n
"
variables:
RUNNER_HIVEMIND_SYNC_MAX_BLOCK: 5000024
before_script:
- scripts/ci/fix_ci_tag.sh
- python3 -V
- pip3 -V
- python3 -m venv venv/
- . venv/bin/activate
- pip install --upgrade pip setuptools wheel
- pip install --no-cache-dir .[tests] 2>&1 | tee pip_install.log
- pip list
- env
script:
- echo "HAF image name $HAF_IMAGE_NAME"
- ./scripts/ci/wait-for-postgres.sh $HAF_ADMIN_POSTGRES_URL
- ./scripts/ci/add-mocks-to-db.sh
- psql "${HAF_ADMIN_POSTGRES_URL}" -c 'CREATE EXTENSION IF NOT EXISTS intarray;'
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
- ./scripts/ci/hive-server.sh start
- *bridge_api_smoketest-script
- *bridge_api_smoketest_negative-script
@ -407,41 +247,39 @@ sync-e2e-benchmark:
- ./scripts/ci/hive-server.sh start
- *api-benchmark-script
after_script:
- cat venv/lib/python3.8/site-packages/hive/_version.py > version.log
- echo "ARTIFACTS_JOB_ID=$CI_JOB_ID" >> variables.env
- echo "APP_VERSION=$(git describe --tags)" >> variables.env
- echo "SERVER_NAME=$CI_RUNNER_DESCRIPTION" >> variables.env
artifacts:
when: always
reports:
junit: "*.xml"
dotenv: variables.env
expire_in: 7 days
paths:
- pip_install.log
- hivemind-sync.log
- hivemind-server.log
- pg-stats
- tests/api_tests/hivemind/tavern/**/*.out.json
- request_process_times.log
- request_process_times_smoketests.log
- version.log
tags: *shared_tags
Trigger benchmark-results-collector:
stage: collector
needs: [ 'sync_e2e_benchmark' ]
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never #on_success
- if: '$CI_COMMIT_BRANCH == "develop"'
when: never #on_success
- when: never #manual
variables:
ARTIFACTS_URL: https://gitlab.syncad.com/api/v4/projects/$CI_PROJECT_ID/jobs/$ARTIFACTS_JOB_ID/artifacts
PRIVATE_TOKEN: $READ_ARTIFACT_ACCESS_TOKEN
@ -449,11 +287,13 @@ Trigger benchmark-results-collector:
SOURCE: hivemind
JOB_ID: $ARTIFACTS_JOB_ID
DESC: "hivemind CI"
EXEC_ENV_DESC: "branch=$CI_COMMIT_REF_SLUG"
SERVER_NAME: "$SERVER_NAME"
APP_VERSION: "$APP_VERSION"
TESTSUITE_VERSION: "commit_short_sha=$CI_COMMIT_SHORT_SHA"
trigger:
project: hive/benchmark-results-collector
branch: master
strategy: depend
#<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| JOBS |<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

.gitmodules

@ -1,3 +1,6 @@
[submodule "tests_api"]
path = tests/tests_api
url = ../tests_api.git
[submodule "haf"]
path = haf
url = ../haf.git

Dockerfile (new file)

@ -0,0 +1,82 @@
# Base docker file defining the environment used to build and run a HAF-based Hivemind instance.
# Use scripts/ci/build_ci_base_image.sh to rebuild a new version of the CI base image. It must be properly tagged and pushed to the container registry.
ARG CI_REGISTRY_IMAGE=registry.gitlab.syncad.com/hive/hivemind/
ARG CI_IMAGE_TAG=:ubuntu20.04-1
FROM --platform=$BUILDPLATFORM python:3.8-slim as runtime
ARG TARGETPLATFORM
ARG BUILDPLATFORM
ENV LANG=en_US.UTF-8
ENV TARGETPLATFORM=${TARGETPLATFORM}
ENV BUILDPLATFORM=${BUILDPLATFORM}
RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y \
bash \
joe \
sudo \
git \
ca-certificates \
postgresql-client \
wget \
procps \
xz-utils \
python3-cffi \
&& DEBIAN_FRONTEND=noninteractive apt-get clean && rm -rf /var/lib/apt/lists/* \
&& useradd -ms /bin/bash "haf_admin" && echo "haf_admin ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers \
&& useradd -ms /bin/bash "haf_app_admin" \
&& useradd -ms /bin/bash "hivemind"
SHELL ["/bin/bash", "-c"]
FROM ${CI_REGISTRY_IMAGE}runtime${CI_IMAGE_TAG} AS ci-base-image
ENV LANG=en_US.UTF-8
SHELL ["/bin/bash", "-c"]
RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y gcc && \
git config --global --add safe.directory /home/hivemind/app
FROM ${CI_REGISTRY_IMAGE}ci-base-image${CI_IMAGE_TAG} AS builder
USER hivemind
WORKDIR /home/hivemind
SHELL ["/bin/bash", "-c"]
COPY --chown=hivemind:hivemind . /home/hivemind/app
RUN ./app/scripts/ci/build.sh
FROM ${CI_REGISTRY_IMAGE}runtime${CI_IMAGE_TAG} AS instance
ARG HTTP_PORT=8080
ENV HTTP_PORT=${HTTP_PORT}
# By default, use the host address from the default docker bridge network
ARG POSTGRES_URL="postgresql://haf_app_admin@172.17.0.1/haf_block_log"
ENV POSTGRES_URL=${POSTGRES_URL}
ENV LANG=en_US.UTF-8
USER hivemind
WORKDIR /home/hivemind
SHELL ["/bin/bash", "-c"]
COPY --from=builder --chown=hivemind:hivemind /home/hivemind/app/dist /home/hivemind/dist
COPY --from=builder --chown=hivemind:hivemind /home/hivemind/.hivemind-venv /home/hivemind/.hivemind-venv
COPY --from=builder --chown=hivemind:hivemind /home/hivemind/app/docker/docker_entrypoint.sh .
COPY --from=builder --chown=hivemind:hivemind /home/hivemind/app/scripts /home/hivemind/app
USER haf_admin
# JSON rpc service
EXPOSE ${HTTP_PORT}
STOPSIGNAL SIGINT
ENTRYPOINT [ "/home/hivemind/docker_entrypoint.sh" ]

README.md

@ -12,6 +12,7 @@ developers with a more flexible/extensible alternative to the raw hived API.
1. [Environment](#environment)
2. [Installation](#installation)
2a. [Installation of dockerized version](#dockerized-setup)
3. [Updating from an existing hivemind database](#updating-from-an-existing-hivemind-database)
4. [Running](#running)
5. [Tests](#tests)
@ -36,11 +37,8 @@ developers with a more flexible/extensible alternative to the raw hived API.
#### Prerequisites:
Hivemind is a [HAF](https://gitlab.syncad.com/hive/haf)-based application. To work properly, it requires an existing and working HAF database.
Hivemind also requires the postgresql `intarray` extension to be installed. A postgresql user with the `CREATE` privilege can load the module with the following command:
@ -109,6 +107,80 @@ $ pip install --no-cache-dir --verbose --user . 2>&1 | tee pip_install.log
</details>
## Dockerized setup
### Building
To build an image holding a Hivemind instance, use [build_instance.sh](scripts/ci/build_instance.sh). This script requires several parameters:
- a tag identifier to be set on the built image
- the directory where the Hivemind source code is located
- a docker registry url, used to produce a fully qualified image name and to correctly resolve the image's dependencies
```bash
$ # Assuming you are in a separate working directory, to perform an out-of-source build
$ ../hivemind/scripts/ci/build_instance.sh local ../hivemind registry.gitlab.syncad.com/hive/hivemind/
```
### Running HAF instance container
A Hivemind instance requires a HAF instance to process the incoming blockchain data and to store its own data in a fork-resistant manner (this allows hivemind data to be reverted in case of a fork).
The easiest way to set up a HAF instance is to use a dockerized one.
To start a HAF instance, we need to prepare a data directory containing:
- a blockchain subdirectory (where the block_log file used by hived can be placed)
- optionally (but very useful) a copy of the haf/doc/haf_postgresql_conf.d directory, which allows simple customization of the Postgres database setup by modifying the `custom_postgres.conf` and `custom_pg_hba.conf` files stored inside
Please take care to set correct file permissions, so that processes running inside the HAF container have write access to the data directory.
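A minimal sketch of preparing such a data directory before starting the container (paths are illustrative):
```bash
mkdir -p /storage1/haf-data-dir/blockchain
cp /path/to/block_log /storage1/haf-data-dir/blockchain/
cp -r ../hivemind/haf/doc/haf_postgresql_conf.d /storage1/haf-data-dir/
# make sure processes inside the HAF container can write to the directory
chmod -R a+w /storage1/haf-data-dir
```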
```bash
$ cd /storage1/haf-data-dir/
$ ../hivemind/haf/scripts/run_hived_img.sh registry.gitlab.syncad.com/hive/haf/instance:instance-<tag> --name=haf-mainnet-instance --data-dir="$(pwd)" <hived-options>
```
For example, for testing purposes (assuming block_log file has been put into data-dir), you can spawn a 5M block replay to prepare a HAF database for further quick testing:
```
../hivemind/haf/scripts/run_hived_img.sh registry.gitlab.syncad.com/hive/haf/instance:instance-v1.27.3.0 --name=haf-mainnet-instance --data-dir="$(pwd)" --replay --stop-replay-at-block=5000000
```
By examining the hived.log file, or by running `docker logs haf-mainnet-instance`, you can check the state of the started instance. Once the replay is finished, you can continue and start the Hivemind sync process.
Example output of hived process stopped on 5,000,000th block:
```
2022-12-19T18:28:05.574637 chain_plugin.cpp:701 replay_blockchain ] Stopped blockchain replaying on user request. Last applied block number: 5000000.
2022-12-19T18:28:05.574658 chain_plugin.cpp:966 plugin_startup ] P2P enabling after replaying...
2022-12-19T18:28:05.574670 chain_plugin.cpp:721 work ] Started on blockchain with 5000000 blocks, LIB: 4999980
2022-12-19T18:28:05.574687 chain_plugin.cpp:727 work ] Started on blockchain with 5000000 blocks
2022-12-19T18:28:05.574736 chain_plugin.cpp:993 plugin_startup ] Chain plugin initialization finished...
2022-12-19T18:28:05.574753 sql_serializer.cpp:712 plugin_startup ] sql::plugin_startup()
2022-12-19T18:28:05.574772 p2p_plugin.cpp:466 plugin_startup ] P2P plugin startup...
2022-12-19T18:28:05.574764 chain_plugin.cpp:339 operator() ] Write processing thread started.
2022-12-19T18:28:05.574782 p2p_plugin.cpp:470 plugin_startup ] P2P plugin is not enabled...
2022-12-19T18:28:05.574840 witness_plugin.cpp:648 plugin_startup ] witness plugin: plugin_startup() begin
2022-12-19T18:28:05.574866 witness_plugin.cpp:655 plugin_startup ] Witness plugin is not enabled, beause P2P plugin is disabled...
2022-12-19T18:28:05.574885 wallet_bridge_api_plugin.cpp:20 plugin_startup ] Wallet bridge api plugin initialization...
2022-12-19T18:28:05.574905 wallet_bridge_api.cpp:169 api_startup ] Wallet bridge api initialized. Missing plugins: database_api block_api account_history_api market_history_api network_broadcast_api rc_api_plugin
2022-12-19T18:28:05.575624 webserver_plugin.cpp:240 operator() ] start processing ws thread
Entering application main loop...
2022-12-19T18:28:05.575687 webserver_plugin.cpp:261 operator() ] start listening for http requests on 0.0.0.0:8090
2022-12-19T18:28:05.575716 webserver_plugin.cpp:263 operator() ] start listening for ws requests on 0.0.0.0:8090
2022-12-19T18:28:35.575535 chain_plugin.cpp:380 operator() ] No P2P data (block/transaction) received in last 30 seconds... peer_count=0
```
### Running Hivemind instance container
The built Hivemind instance requires a preconfigured HAF database to store its data. To perform the required database configuration, run:
```bash
$ ../hivemind/scripts/setup_postgres.sh --postgres-url=postgresql://haf_app_admin@172.17.0.2/haf_block_log
$ ../hivemind/scripts/setup_db.sh --postgres-url=postgresql://haf_admin@172.17.0.2/haf_block_log # warning: this command requires haf_admin access, since superuser permissions are needed to install the intarray extension
```
The commands above assume that the running HAF container has the IP 172.17.0.2. The sync process can then be started as follows:
```bash
$ ../hivemind/scripts/run_instance.sh registry.gitlab.syncad.com/hive/hivemind/instance:local sync --database-url="postgresql://haf_app_admin@172.17.0.2:5432/haf_block_log"
```
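Once the sync process has populated the database, the API server can presumably be started from the same image by swapping the mode argument (a hypothetical invocation mirroring the sync example above):
```bash
$ ../hivemind/scripts/run_instance.sh registry.gitlab.syncad.com/hive/hivemind/instance:local server --database-url="postgresql://haf_app_admin@172.17.0.2:5432/haf_block_log"
```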
## Updating from an existing hivemind database
```bash
@ -120,15 +192,13 @@ $ ./db_upgrade.sh <user-name> hive
## Running
> If you installed hivemind in a virtual environment, don't forget to activate it first
Export the URL to your HAF database:
```bash
$ export DATABASE_URL=postgresql://hivemind_app:pass@localhost:5432/haf_block_log
```
#### Start the hivemind indexer (aka synchronization process):
```bash
$ hive sync
@ -139,7 +209,7 @@ $ hive status
{'db_head_block': 19930833, 'db_head_time': '2018-02-16 21:37:36', 'db_head_age': 10}
```
#### Start the hivemind API server:
```bash
$ hive server
@ -154,8 +224,15 @@ $ curl --data '{"jsonrpc":"2.0","id":0,"method":"hive.db_head_state","params":{}
To run api tests:
1. Make sure that the current version of `hivemind` is installed,
2. Api tests require that `hivemind` is synced to a node replayed up to `5_000_024` blocks (including mocks).\
This means you should have your HAF database replayed up to `5_000_000` mainnet blocks and run the mocking script with:
```bash
$ cd hivemind/
$ ./scripts/ci/add-mocks-to-db.sh
```
3. Run `hivemind` in `server` mode
4. Set env variables:
@ -170,23 +247,16 @@ To run api tests:
$ tox -e tavern -- -n auto --durations=0
```
## Production Environment
Deploying Hivemind as a Docker container will be supported once the HAF-based version of Hivemind is released.
## Configuration
| Environment | CLI argument | Default |
|--------------------|----------------------|--------------------------------------------|
| `LOG_LEVEL` | `--log-level` | INFO |
| `HTTP_SERVER_PORT` | `--http-server-port` | 8080 |
| `DATABASE_URL` | `--database-url` | postgresql://user:pass@localhost:5432/hive |
| `MAX_BATCH` | `--max-batch` | 35 |
| `MAX_WORKERS` | `--max-workers` | 6 |
| `MAX_RETRIES` | `--max-retries` | -1 |
Precedence: CLI over ENV over hive.conf. Check `hive --help` for details.
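For example, a CLI argument wins over an environment variable:
```bash
# runs with log level DEBUG despite the environment setting
$ LOG_LEVEL=INFO hive server --log-level=DEBUG
```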
@ -196,13 +266,13 @@ Precedence: CLI over ENV over hive.conf. Check `hive --help` for details.
- Focus on Postgres performance
- 9GB of memory for `hive sync` process
- 750GB storage for hivemind's use of the database
#### Hived config
Plugins
- Required: `sql_serializer`
#### Postgres Performance
@ -312,7 +382,7 @@ full with their final state.
#### API layer
Performs queries against the core and cache tables, merging them into a response in such a way that the frontend will
not need to perform any additional calls to `hived` itself. The initial API simply mimics hived's `condenser_api` for
backwards compatibility, but will be extended to leverage new opportunities and simplify application development.

docker/docker_entrypoint.sh (new executable file)

@ -0,0 +1,62 @@
#! /bin/bash
set -euo pipefail
SCRIPTDIR="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
cleanup () {
echo "Performing cleanup...."
python_pid=$(pidof 'python3')
echo "python_pid: $python_pid"
sudo -n kill -INT $python_pid
echo "Waiting for hivemind finish..."
tail --pid=$python_pid -f /dev/null || true
echo "Hivemind app finish done."
echo "Cleanup actions done."
}
trap cleanup INT QUIT TERM
HIVEMIND_ARGS=()
while [ $# -gt 0 ]; do
case "$1" in
--database-url=*)
POSTGRES_URL="${1#*=}"
;;
--port=*)
HTTP_PORT="${1#*=}"
;;
*)
HIVEMIND_ARGS+=("$1")
esac
shift
done
pushd /home/hivemind/app
# temporarily commented out - the fully dockerized version needs separate steps
#./scripts/setup_postgres.sh --postgres-url=${POSTGRES_URL}
#./scripts/setup_db.sh --postgres-url=${POSTGRES_URL}
{
echo "Attempting to start Hivemind process..."
sudo -HEnu hivemind /bin/bash <<EOF
source /home/hivemind/.hivemind-venv/bin/activate
hive "${HIVEMIND_ARGS[@]}" --database-url="${POSTGRES_URL}"
EOF
echo "Hivemind process finished execution: $?"
} &
job_pid=$!
jobs -l
echo "waiting for job finish: $job_pid."
wait $job_pid || true
echo "Exiting docker entrypoint..."

haf (new submodule)

@ -0,0 +1 @@
Subproject commit 0b2974394776cd064dff0e6774bfb23bd2ca28c5


@ -19,18 +19,18 @@ def setup_logging(conf):
if timestamp and epoch:
datefmt = '%Y-%m-%d %H:%M:%S'
timezone = time.strftime('%z')
fmt = f'%(asctime)s.%(msecs)03d{timezone} %(created).6f %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(format=fmt, datefmt=datefmt)
elif timestamp:
datefmt = '%Y-%m-%d %H:%M:%S'
timezone = time.strftime('%z')
fmt = f'%(asctime)s.%(msecs)03d{timezone} %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(format=fmt, datefmt=datefmt)
elif epoch:
fmt = '%(created).6f %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(format=fmt)
else:
fmt = '%(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(format=fmt)
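For illustration, the timestamp variant above would render records like (values invented):
```
2022-12-20 07:26:30.123+0100 INFO - hive.indexer.sync:87 - sync started
```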
@ -81,9 +81,9 @@ def launch_mode(mode, conf):
run_server(conf=conf)
elif mode == 'sync':
from hive.indexer.sync import SyncHiveDb
with SyncHiveDb(conf=conf) as sync:
sync.run()
elif mode == 'status':


@ -2,16 +2,18 @@
import logging
import re
from typing import Final
import configargparse
from hive.db.adapter import Db
from hive.steem.client import SteemClient
from hive.utils.normalize import int_log_level, strtobool
from hive.utils.stats import DbStats
log = logging.getLogger(__name__)
SCHEMA_NAME: Final[str] = 'hivemind_app'
def _sanitized_conf(parser):
"""Formats parser config, redacting database url password."""
@ -26,7 +28,6 @@ class Conf:
self._args = None
self._env = None
self._db = None
self._steem = None
self.arguments = None
def init_argparse(self, strict=True, **kwargs):
@ -41,27 +42,6 @@ class Conf:
# common
add('--database-url', env_var='DATABASE_URL', required=False, help='database connection url', default='')
add(
'--steemd-url',
env_var='STEEMD_URL',
required=False,
help='steemd/jussi endpoint',
default='{"default" : "https://api.hive.blog"}',
)
add(
'--muted-accounts-url',
env_var='MUTED_ACCOUNTS_URL',
required=False,
help='url to flat list of muted accounts',
default='https://raw.githubusercontent.com/hivevectordefense/irredeemables/master/full.txt',
)
add(
'--blacklist-api-url',
env_var='BLACKLIST_API_URL',
required=False,
help='url to access blacklist api',
default='https://blacklist.usehive.com',
)
# server
add('--http-server-port', type=int, env_var='HTTP_SERVER_PORT', default=8080)
@ -76,14 +56,8 @@ class Conf:
# sync
add('--max-workers', type=int, env_var='MAX_WORKERS', help='max workers for batch requests', default=6)
add('--max-batch', type=int, env_var='MAX_BATCH', help='max chunk size for batch requests', default=35)
add(
'--max-retries',
type=int,
env_var='MAX_RETRIES',
help='max number of retries after request failure is accepted; default -1 means no limit',
default=-1,
)
add('--trail-blocks', type=int, env_var='TRAIL_BLOCKS', help='number of blocks to trail head by', default=2)
# --sync-to-s3 seems to be unnecessary
add(
'--sync-to-s3',
type=strtobool,
@ -91,23 +65,9 @@ class Conf:
help='alternative healthcheck for background sync service',
default=False,
)
add(
'--hived-database-url',
env_var='HIVED_DATABASE_URL',
required=False,
help='Hived blocks database connection url',
default='',
)
# test/debug
add('--log-level', env_var='LOG_LEVEL', default='INFO')
add(
'--test-disable-sync',
type=strtobool,
env_var='TEST_DISABLE_SYNC',
help='(debug) skip sync and sweep; jump to block streaming',
default=False,
)
add(
'--test-max-block',
type=int,
@ -115,12 +75,6 @@ class Conf:
help='(debug) only sync to given block, for running sync test',
default=None,
)
add(
'--test-skip-ais-phase',
env_var='TEST_SKIP_AIS_PHASE',
help='(debug) Allows to skip After-Initial-Sync phase. Useful to go into live sync or exit if TEST_MAX_BLOCK is used',
action='store_true',
)
add('--test-profile', type=strtobool, env_var='TEST_PROFILE', help='(debug) profile execution', default=False)
add(
'--log-request-times',
@ -128,11 +82,17 @@ class Conf:
help='(debug) allows to generate log containing request processing times',
action='store_true',
)
add(
'--log-op-calls',
env_var='LOG_OP_CALLS',
help='(debug) log operations calls and responses',
action='store_true',
)
add(
'--log-virtual-op-calls',
env_var='LOG_VIRTUAL_OP_CALLS',
help='(debug) log virtual op calls and responses',
default=False,
action='store_true',
)
add(
'--mock-block-data-path',
@ -235,29 +195,14 @@ class Conf:
"""Get the raw Namespace object as generated by configargparse"""
return self._args
def steem(self):
"""Get a SteemClient instance, lazily initialized"""
if not self._steem:
from json import loads
self._steem = SteemClient(
url=loads(self.get('steemd_url')),
max_batch=self.get('max_batch'),
max_workers=self.get('max_workers'),
max_retries=self.get('max_retries'),
)
return self._steem
def db(self):
"""Get a configured instance of Db."""
if self._db is None:
url = self.get('database_url')
enable_autoexplain = self.get('log_explain_queries')
assert url, '--database-url (or DATABASE_URL env) not specified'
self._db = Db(url, "root", enable_autoexplain)
log.info("The database instance is created...")
return self._db


@ -80,7 +80,6 @@ class Db:
def clone(self, name):
cloned = Db(self._url, name, self.__autoexplain)
cloned._engine = self._engine
return cloned
@ -88,11 +87,11 @@ class Db:
"""Close connection."""
try:
for item in self._conn:
log.info(f"Closing database connection: '{item['name']}'")
item['connection'].close()
self._engine.dispose()
self._conn = []
assert self._engine.pool.checkedin() == 0, f'All connections of {self.name} should be closed!'
except Exception as ex:
log.exception(f"Error during connections closing: {ex}")
raise ex
@ -121,16 +120,13 @@ class Db:
self._engine = sqlalchemy.create_engine(
self._url,
isolation_level="READ UNCOMMITTED", # only supported in mysql
pool_size=Db.max_connections,
pool_recycle=3600,
echo=False,
connect_args={'application_name': f'hivemind_{self.name}'},
)
return self._engine
def get_new_connection(self, name):
self._conn.append({"connection": self.engine().connect(), "name": name})
return self.get_connection(len(self._conn) - 1)
def get_dialect(self):
return self.get_connection(0).dialect
@ -156,32 +152,32 @@ class Db:
# this method is reserved for anything but SELECT
assert self._is_write_query(sql), sql
return self._query(sql, **kwargs)
def query_prepared(self, sql, **kwargs):
self._query(sql, True, **kwargs)
def query_no_return(self, sql, **kwargs):
self._query(sql, **kwargs)
def query_all(self, sql, **kwargs):
"""Perform a `SELECT n*m`"""
res = self._query(sql, **kwargs)
return res.fetchall()
def query_row(self, sql, **kwargs):
"""Perform a `SELECT 1*m`"""
res = self._query(sql, **kwargs)
return first(res)
def query_col(self, sql, **kwargs):
"""Perform a `SELECT n*1`"""
res = self._query(sql, **kwargs).fetchall()
return [r[0] for r in res]
def query_one(self, sql, **kwargs):
"""Perform a `SELECT 1*1`"""
row = first(self._query(sql, **kwargs))
return first(row) if row else None
def engine_name(self):
@ -251,7 +247,7 @@ class Db:
query = sqlalchemy.text(sql)
return query
def _query(self, sql, is_prepared: bool = False, **kwargs):
"""Send a query off to SQLAlchemy."""
if sql == 'START TRANSACTION':
assert not self._trx_active


@ -1,4 +1,4 @@
"""Hive db state manager. Check if schema loaded, init synced, etc."""
"""Hive db state manager. Check if schema loaded, massive synced, etc."""
# pylint: disable=too-many-lines
@ -6,14 +6,17 @@ from concurrent.futures import as_completed, ThreadPoolExecutor
import logging
import time
from time import perf_counter
from typing import Optional
import sqlalchemy
from hive.conf import SCHEMA_NAME
from hive.db.adapter import Db
from hive.db.schema import build_metadata, setup, teardown
from hive.indexer.auto_db_disposer import AutoDbDisposer
from hive.server.common.payout_stats import PayoutStats
from hive.utils.communities_rank import update_communities_posts_and_rank
from hive.utils.misc import get_memory_amount
from hive.utils.stats import FinalOperationStatusManager as FOSM
log = logging.getLogger(__name__)
@ -26,8 +29,8 @@ class DbState:
_db = None
# prop is true until massive sync complete
_is_massive_sync = True
@classmethod
def initialize(cls):
@ -35,19 +38,21 @@ class DbState:
1) Load schema if needed
2) Run migrations if needed
3) Check if massive sync has completed
"""
log.info("[INIT] Welcome to hive!")
log.info("[MASSIVE] Welcome to hive!")
# create db schema if needed
if not cls._is_schema_loaded():
log.info("[INIT] Create db schema...")
setup(cls.db())
log.info("[MASSIVE] Create db schema...")
db_setup = cls.db().clone('setup')
setup(db=db_setup)
db_setup.close()
# check if massive sync complete
cls._is_massive_sync = True
log.info("[MASSIVE] Continue with massive sync...")
@classmethod
def teardown(cls):
@ -62,17 +67,18 @@ class DbState:
return cls._db
@classmethod
def finish_massive_sync(cls, current_imported_block) -> None:
"""Set status to massive sync complete."""
if not cls._is_massive_sync:
return
cls._after_massive_sync(current_imported_block)
cls._is_massive_sync = False
log.info("[MASSIVE] Massive sync complete!")
@classmethod
def is_massive_sync(cls):
"""Check if we're still in the process of massive sync."""
return cls._is_massive_sync
@classmethod
def _all_foreign_keys(cls):
@ -85,7 +91,6 @@ class DbState:
@classmethod
def _disableable_indexes(cls):
to_locate = [
'hive_blocks_created_at_idx',
'hive_feed_cache_block_num_idx',
'hive_feed_cache_created_at_idx',
'hive_feed_cache_post_id_idx',
@ -155,36 +160,46 @@ class DbState:
return False
@classmethod
def _execute_query(cls, db: Db, sql: str, explain: bool = False) -> None:
time_start = perf_counter()
log.info("[MASSIVE] Attempting to execute query: `%s'...", sql)
db.explain().query_no_return(sql) if explain else db.query_no_return(sql)
time_end = perf_counter()
log.info("[MASSIVE] Query `%s' done in %.4fs", sql, time_end - time_start)
@classmethod
def _execute_query_with_modified_work_mem(
cls, db: Db, sql: str, explain: bool = False, value: Optional[str] = None, separate_transaction: bool = True
) -> None:
divide_factor = 64
_value = value or f'{int(get_memory_amount() / divide_factor)}MB'
sql_show_work_mem = 'SHOW work_mem;'
work_mem_before = db.query_one(sql_show_work_mem)
if separate_transaction:
db.query('START TRANSACTION')
db.query_no_return(sql='SET LOCAL work_mem = :work_mem', work_mem=_value)
work_mem_local = db.query_one(sql_show_work_mem)
message = f'SET work_mem was ineffective; given: {_value} before: {work_mem_before} now: {work_mem_local}'
assert work_mem_local == _value, message
cls._execute_query(db, sql, explain)
if separate_transaction:
db.query('COMMIT')
work_mem_after = db.query_one(sql_show_work_mem)
assert work_mem_after == work_mem_before, f'work_mem was changed: {work_mem_before} -> {work_mem_after}'
@classmethod
def processing_indexes_per_table(cls, db, table_name, indexes, is_pre_process, drop, create):
log.info("[INIT] Begin %s-initial sync hooks for table %s", "pre" if is_pre_process else "post", table_name)
log.info("[MASSIVE] Begin %s-massive sync hooks for table %s", "pre" if is_pre_process else "post", table_name)
with AutoDbDisposer(db, table_name) as db_mgr:
engine = db_mgr.db.engine()
@ -214,7 +229,7 @@ class DbState:
log.info("Index %s created in time %.4f s", index.name, elapsed_time)
any_index_created = True
log.info("[INIT] End %s-initial sync hooks for table %s", "pre" if is_pre_process else "post", table_name)
log.info("[MASSIVE] End %s-massive sync hooks for table %s", "pre" if is_pre_process else "post", table_name)
@classmethod
def processing_indexes(cls, is_pre_process, drop, create):
@ -232,7 +247,7 @@ class DbState:
)
)
cls.process_tasks_in_threads("[INIT] %i threads finished creating indexes.", methods)
cls.process_tasks_in_threads("[MASSIVE] %i threads finished creating indexes.", methods)
real_time = FOSM.stop(start_time)
@ -245,16 +260,14 @@ class DbState:
log.info(f"=== {action} INDEXES ===")
@classmethod
def before_massive_sync(cls, last_imported_block: int, hived_head_block: int):
"""Disables non-critical indexes for faster sync, as well as foreign key constraints."""
cls._is_massive_sync = True
to_sync = hived_head_block - last_imported_block
if to_sync < SYNCED_BLOCK_LIMIT:
log.info("[INIT] Skipping pre-initial sync hooks")
log.info("[MASSIVE] Skipping pre-massive sync hooks")
return
# is_pre_process, drop, create
@ -268,91 +281,68 @@ class DbState:
# intentionally disabled since it needs a lot of WAL disk space when switching back to LOGGED
# set_logged_table_attribute(cls.db(), False)
log.info("[INIT] Finish pre-initial sync hooks")
log.info("[MASSIVE] Finish pre-massive sync hooks")
@classmethod
def update_work_mem(cls, workmem_value):
row = cls.db().query_row("SHOW work_mem")
current_work_mem = row['work_mem']
sql = """
DO $$
BEGIN
EXECUTE 'ALTER DATABASE '||current_database()||' SET work_mem TO "{}"';
END
$$;
"""
cls.db().query_no_return(sql.format(workmem_value))
return current_work_mem
@classmethod
def _finish_hive_posts(cls, db, massive_sync_preconditions, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_posts") as db_mgr:
# UPDATE: `abs_rshares`, `vote_rshares`, `sc_hot`, ,`sc_trend`, `total_votes`, `net_votes`
time_start = perf_counter()
sql = f"""
SELECT update_posts_rshares({last_imported_block}, {current_imported_block});
"""
cls._execute_and_explain_query(db_mgr.db, sql)
log.info("[INIT] update_posts_rshares executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_posts_rshares({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql, explain=True)
log.info("[MASSIVE] update_posts_rshares executed in %.4fs", perf_counter() - time_start)
time_start = perf_counter()
# UPDATE: `children`
if massive_sync_preconditions:
# Update count of all child posts (what was held during massive sync)
cls._execute_query_with_modified_work_mem(
db=db_mgr.db, sql=f"SELECT {SCHEMA_NAME}.update_all_hive_posts_children_count()"
)
else:
# Update count of child posts processed during partial sync (what was held during massive sync)
sql = f"SELECT {SCHEMA_NAME}.update_hive_posts_children_count({last_imported_block}, {current_imported_block})"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_children_count executed in %.4fs", perf_counter() - time_start)
# UPDATE: `root_id`
# Update root_id all root posts
time_start = perf_counter()
sql = f"""
select update_hive_posts_root_id({last_imported_block}, {current_imported_block})
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_hive_posts_root_id executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_hive_posts_root_id({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_root_id executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_hive_posts_api_helper(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_posts_api_helper") as db_mgr:
time_start = perf_counter()
sql = f"""
select update_hive_posts_api_helper({last_imported_block}, {current_imported_block})
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_hive_posts_api_helper executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_hive_posts_api_helper({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_api_helper executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_hive_feed_cache(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_feed_cache") as db_mgr:
time_start = perf_counter()
sql = f"""
SELECT update_feed_cache({last_imported_block}, {current_imported_block});
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_feed_cache executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_feed_cache({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_feed_cache executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_hive_mentions(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_mentions") as db_mgr:
time_start = perf_counter()
sql = f"""
SELECT update_hive_posts_mentions({last_imported_block}, {current_imported_block});
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_hive_posts_mentions executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_hive_posts_mentions({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_mentions executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_payout_stats_view(cls):
time_start = perf_counter()
PayoutStats.generate()
log.info("[INIT] payout_stats_view executed in %.4fs", perf_counter() - time_start)
log.info("[MASSIVE] payout_stats_view executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_account_reputations(cls, db, last_imported_block, current_imported_block):
@ -362,48 +352,40 @@ class DbState:
with AutoDbDisposer(db, "finish_account_reputations") as db_mgr:
time_start = perf_counter()
sql = f"""
SELECT update_account_reputations({last_imported_block}, {current_imported_block}, True);
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_account_reputations executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_account_reputations({last_imported_block}, {current_imported_block}, True);"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_account_reputations executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_communities_posts_and_rank(cls, db):
with AutoDbDisposer(db, "finish_communities_posts_and_rank") as db_mgr:
time_start = perf_counter()
update_communities_posts_and_rank(db_mgr.db)
log.info("[INIT] update_communities_posts_and_rank executed in %.4fs", perf_counter() - time_start)
log.info("[MASSIVE] update_communities_posts_and_rank executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_blocks_consistency_flag(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_blocks_consistency_flag") as db_mgr:
time_start = perf_counter()
sql = f"""
SELECT update_hive_blocks_consistency_flag({last_imported_block}, {current_imported_block});
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_hive_blocks_consistency_flag executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_last_completed_block({current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_last_completed_block executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_notification_cache(cls, db):
with AutoDbDisposer(db, "finish_notification_cache") as db_mgr:
time_start = perf_counter()
sql = """
SELECT update_notification_cache(NULL, NULL, False);
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_notification_cache executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_notification_cache(NULL, NULL, False);"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_notification_cache executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_follow_count(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_follow_count") as db_mgr:
time_start = perf_counter()
sql = f"""
SELECT update_follow_count({last_imported_block}, {current_imported_block});
"""
cls._execute_query(db_mgr.db, sql)
log.info("[INIT] update_follow_count executed in %.4fs", perf_counter() - time_start)
sql = f"SELECT {SCHEMA_NAME}.update_follow_count({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_follow_count executed in %.4fs", perf_counter() - time_start)
@classmethod
def time_collector(cls, func, args):
@ -413,6 +395,7 @@ class DbState:
@classmethod
def process_tasks_in_threads(cls, info, methods):
start_time = perf_counter()
pool = ThreadPoolExecutor(max_workers=Db.max_connections)
futures = {
@ -431,7 +414,7 @@ class DbState:
raise exc
pool.shutdown()
log.info(f'{info} Real elapsed time: {perf_counter() - start_time:.3f}', completedThreads)
@classmethod
def _finish_all_tables(cls, massive_sync_preconditions, last_imported_block, current_imported_block):
@ -439,46 +422,36 @@ class DbState:
log.info("#############################################################################")
methods = [
('hive_feed_cache', cls._finish_hive_feed_cache, [cls.db(), last_imported_block, current_imported_block]),
('hive_mentions', cls._finish_hive_mentions, [cls.db(), last_imported_block, current_imported_block]),
('payout_stats_view', cls._finish_payout_stats_view, []),
('communities_posts_and_rank', cls._finish_communities_posts_and_rank, [cls.db()]),
(
'hive_posts',
cls._finish_hive_posts,
[cls.db(), massive_sync_preconditions, last_imported_block, current_imported_block],
),
(
'blocks_consistency_flag',
cls._finish_blocks_consistency_flag,
[cls.db(), last_imported_block, current_imported_block],
),
]
cls.process_tasks_in_threads("[MASSIVE] %i threads finished filling tables. Part nr 0", methods)
# Notifications are dependent on many tables, therefore it's necessary to calculate it at the end
# hive_posts_api_helper is dependent on `hive_posts/root_id` filling
methods = [
('notification_cache', cls._finish_notification_cache, [cls.db()]),
('follow_count', cls._finish_follow_count, [cls.db(), last_imported_block, current_imported_block]),
(
'hive_posts_api_helper',
cls._finish_hive_posts_api_helper,
[cls.db(), last_imported_block, current_imported_block],
),
]
cls.process_tasks_in_threads("[MASSIVE] %i threads finished filling tables. Part nr 1", methods)
real_time = FOSM.stop(start_time)
@ -491,18 +464,18 @@ class DbState:
log.info("=== FILLING FINAL DATA INTO TABLES ===")
@classmethod
def _after_massive_sync(cls, current_imported_block: int) -> None:
"""Re-creates non-core indexes for serving APIs after massive sync, as well as all foreign keys."""
from hive.indexer.blocks import Blocks
start_time = perf_counter()
last_imported_block = Blocks.last_completed()
log.info(
"[MASSIVE] Current imported block: %s. Last imported block: %s.",
current_imported_block,
last_imported_block,
)
if last_imported_block > current_imported_block:
last_imported_block = current_imported_block
@ -524,38 +497,35 @@ class DbState:
# Update statistics and execution plans after index creation.
if massive_sync_preconditions:
cls._execute_query(cls.db(), "VACUUM (VERBOSE,ANALYZE)")
cls._execute_query(db=cls.db(), sql="VACUUM (VERBOSE,ANALYZE)")
# all post-updates are executed in different threads: one thread per one table
log.info("Filling tables with final values: started")
cls._finish_all_tables(massive_sync_preconditions, last_imported_block, current_imported_block)
log.info("Filling tables with final values: finished")
# Update a block num immediately
cls.db().query_no_return("UPDATE hive_state SET block_num = :block_num", block_num=current_imported_block)
if massive_sync_preconditions:
from hive.db.schema import create_fk
# intentionally disabled since it needs a lot of WAL disk space when switching back to LOGGED
# set_logged_table_attribute(cls.db(), True)
start_time_foreign_keys = perf_counter()
log.info("Recreating foreign keys")
create_fk(cls.db())
log.info("Foreign keys were recreated")
log.info(f"Foreign keys were recreated in {perf_counter() - start_time_foreign_keys:.3f}s")
cls._execute_query(cls.db(), "VACUUM (VERBOSE,ANALYZE)")
cls._execute_query(db=cls.db(), sql="VACUUM (VERBOSE,ANALYZE)")
end_time = perf_counter()
log.info("[INIT] After initial sync actions done in %.4fs", end_time - start_time)
log.info("[MASSIVE] After massive sync actions done in %.4fs", end_time - start_time)
@staticmethod
def status():
"""Basic health status: head block/time, current age (secs)."""
sql = "SELECT num, created_at, extract(epoch from created_at) ts " "FROM hive_blocks ORDER BY num DESC LIMIT 1"
sql = f"SELECT * FROM {SCHEMA_NAME}.get_head_state()"
row = DbState.db().query_row(sql)
return dict(
db_head_block=row['num'], db_head_time=str(row['created_at']), db_head_age=int(time.time() - row['ts'])
db_head_block=row['num'], db_head_time=str(row['created_at']), db_head_age=int(time.time() - row['age'])
)
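Note the column naming: status() computes db_head_age as time.time() - row['age'], so the 'age' column returned by get_head_state() evidently carries the head block's creation time as epoch seconds, not a precomputed age. A small consumer sketch (values are examples only):

    # Illustrative use of the status payload defined above.
    status = DbState.status()
    print(f"head block {status['db_head_block']} created {status['db_head_time']}, "
          f"{status['db_head_age']}s behind wall clock")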
@classmethod
@ -564,13 +534,7 @@ class DbState:
# check if database has been initialized (i.e. schema loaded)
_engine_name = cls.db().engine_name()
if _engine_name == 'postgresql':
return bool(
cls.db().query_one(
"""
SELECT 1 FROM pg_catalog.pg_tables WHERE schemaname = 'public'
"""
)
)
return bool(cls.db().query_one(f"SELECT 1 FROM pg_catalog.pg_tables WHERE schemaname = '{SCHEMA_NAME}';"))
if _engine_name == 'mysql':
return bool(cls.db().query_one('SHOW TABLES'))
raise Exception(f"unknown db engine {_engine_name}")
@ -579,6 +543,6 @@ class DbState:
def _is_feed_cache_empty(cls):
"""Check if the hive_feed_cache table is empty.
If empty, it indicates that the initial sync has not finished.
If empty, it indicates that the massive sync has not finished.
"""
return not cls.db().query_one("SELECT 1 FROM hive_feed_cache LIMIT 1")
return not cls.db().query_one(f"SELECT 1 FROM {SCHEMA_NAME}.hive_feed_cache LIMIT 1")
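This check is what gates massive sync at startup: an empty feed cache means the massive phase never completed. A sketch of that decision — illustrative control flow, not necessarily the project's exact code:

    def choose_sync_mode(db_state) -> str:
        # Illustrative: an empty feed cache means the massive phase never
        # completed, so it must be (re)entered before live sync can start.
        return 'massive' if db_state._is_feed_cache_empty() else 'live'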

View File

@ -1,6 +1,7 @@
"""Db schema definitions and setup routines."""
import logging
from pathlib import Path
import sqlalchemy as sa
from sqlalchemy.sql import text as sql_text
@ -10,35 +11,23 @@ from sqlalchemy.types import SMALLINT
from sqlalchemy.types import TEXT
from sqlalchemy.types import VARCHAR
log = logging.getLogger(__name__)
from hive.conf import SCHEMA_NAME
from hive.indexer.hive_db.haf_functions import context_attach, context_detach, prepare_app_context
log = logging.getLogger(__name__)
# pylint: disable=line-too-long, too-many-lines, bad-whitespace
def build_metadata():
"""Build schema def with SqlAlchemy"""
metadata = sa.MetaData()
sa.Table(
'hive_blocks',
metadata,
sa.Column('num', sa.Integer, primary_key=True, autoincrement=False),
sa.Column('hash', CHAR(40), nullable=False),
sa.Column('prev', CHAR(40)),
sa.Column('txs', SMALLINT, server_default='0', nullable=False),
sa.Column('ops', sa.Integer, server_default='0', nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('completed', sa.Boolean, nullable=False, server_default='0'),
sa.UniqueConstraint('hash', name='hive_blocks_ux1'),
sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'),
sa.Index('hive_blocks_created_at_idx', 'created_at'),
sa.Index('hive_blocks_completed_idx', 'completed'),
)
metadata = sa.MetaData(schema=SCHEMA_NAME)
hive_rowid_seq = sa.Sequence('hive.hivemind_app_hive_rowid_seq', metadata=metadata)
sa.Table(
'hive_accounts',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', VARCHAR(16, collation='C'), nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
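Every table now carries a hive_rowid column whose default is drawn from one shared sequence — presumably the row identity HAF uses for fork handling (assumption based on HAF conventions). A standalone sketch of how such a server-side default renders into DDL (table and sequence names here are illustrative, not project values):

    import sqlalchemy as sa
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.schema import CreateTable

    metadata = sa.MetaData(schema='hivemind_app')
    rowid_seq = sa.Sequence('demo_hive_rowid_seq', metadata=metadata)

    demo = sa.Table(
        'demo_table',
        metadata,
        sa.Column('hive_rowid', sa.BigInteger,
                  server_default=rowid_seq.next_value(), nullable=False),
        sa.Column('id', sa.Integer, primary_key=True),
    )

    # The rendered DDL shows hive_rowid with a nextval(...) server default,
    # i.e. every insert draws its row id from the shared sequence.
    print(CreateTable(demo).compile(dialect=postgresql.dialect()))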
@ -58,6 +47,7 @@ def build_metadata():
sa.Table(
'hive_reputation_data',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('author_id', sa.Integer, nullable=False),
sa.Column('voter_id', sa.Integer, nullable=False),
@ -71,6 +61,7 @@ def build_metadata():
sa.Table(
'hive_posts',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('root_id', sa.Integer, nullable=False), # records initially set to 0 are later updated to their own id
sa.Column('parent_id', sa.Integer, nullable=False),
@ -122,9 +113,9 @@ def build_metadata():
sa.Column('block_num', sa.Integer, nullable=False),
sa.Column('block_num_created', sa.Integer, nullable=False),
sa.Column('tags_ids', sa.ARRAY(sa.Integer), nullable=True),
sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1'),
sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2'),
sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'),
sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1', deferrable=True),
sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2', deferrable=True),
sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3', deferrable=True),
sa.UniqueConstraint('author_id', 'permlink_id', 'counter_deleted', name='hive_posts_ux1'),
sa.Index('hive_posts_depth_idx', 'depth'),
sa.Index('hive_posts_root_id_id_idx', 'root_id', 'id'),
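All application-level foreign keys become DEFERRABLE here (FKs pointing at the removed hive_blocks table disappear entirely, since block data now lives in HAF). A deferrable constraint can be checked at COMMIT instead of per-statement, which matters for bulk loading; a hedged sketch of how that is exploited (the DSN and session handling are assumptions, not project code):

    from sqlalchemy import create_engine, text

    engine = create_engine('postgresql:///haf_block_log')  # assumed DSN
    with engine.begin() as conn:
        # Only DEFERRABLE constraints honor this; FK checks move to COMMIT
        # time, so child rows may be inserted before their parents within
        # the same transaction.
        conn.execute(text('SET CONSTRAINTS ALL DEFERRED'))
        # ... bulk inserts here ...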
@ -198,6 +189,7 @@ def build_metadata():
sa.Table(
'hive_post_data',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
sa.Column('title', VARCHAR(512), nullable=False, server_default=''),
sa.Column('preview', VARCHAR(1024), nullable=False, server_default=''), # first 1k of 'body'
@ -209,6 +201,7 @@ def build_metadata():
sa.Table(
'hive_permlink_data',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('permlink', sa.String(255, collation='C'), nullable=False),
sa.UniqueConstraint('permlink', name='hive_permlink_data_permlink'),
@ -217,6 +210,7 @@ def build_metadata():
sa.Table(
'hive_category_data',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('category', sa.String(255, collation='C'), nullable=False),
sa.UniqueConstraint('category', name='hive_category_data_category'),
@ -225,6 +219,7 @@ def build_metadata():
sa.Table(
'hive_votes',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.BigInteger, primary_key=True),
sa.Column('post_id', sa.Integer, nullable=False),
sa.Column('voter_id', sa.Integer, nullable=False),
@ -240,11 +235,10 @@ def build_metadata():
sa.UniqueConstraint(
'voter_id', 'author_id', 'permlink_id', name='hive_votes_voter_id_author_id_permlink_id_uk'
),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1'),
sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2'),
sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3'),
sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4'),
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_votes_fk5'),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1', deferrable=True),
sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2', deferrable=True),
sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3', deferrable=True),
sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4', deferrable=True),
sa.Index(
'hive_votes_voter_id_post_id_idx', 'voter_id', 'post_id'
), # this index is probably redundant with hive_votes_voter_id_last_update_idx, since both start with voter_id.
@ -265,6 +259,7 @@ def build_metadata():
sa.Table(
'hive_tag_data',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, nullable=False, primary_key=True),
sa.Column('tag', VARCHAR(64, collation='C'), nullable=False, server_default=''),
sa.UniqueConstraint('tag', name='hive_tag_data_ux1'),
@ -273,6 +268,7 @@ def build_metadata():
sa.Table(
'hive_follows',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('follower', sa.Integer, nullable=False),
sa.Column('following', sa.Integer, nullable=False),
@ -283,7 +279,6 @@ def build_metadata():
sa.Column('follow_muted', BOOLEAN, nullable=False, server_default='0'),
sa.Column('block_num', sa.Integer, nullable=False),
sa.UniqueConstraint('following', 'follower', name='hive_follows_ux1'), # core
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'),
sa.Index('hive_follows_following_state_idx', 'following', 'state'),
sa.Index('hive_follows_follower_state_idx', 'follower', 'state'),
sa.Index('hive_follows_follower_following_state_idx', 'follower', 'following', 'state'),
@ -294,14 +289,14 @@ def build_metadata():
sa.Table(
'hive_reblogs',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('blogger_id', sa.Integer, nullable=False),
sa.Column('post_id', sa.Integer, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('block_num', sa.Integer, nullable=False),
sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1'),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'),
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'),
sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1', deferrable=True),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2', deferrable=True),
sa.UniqueConstraint('blogger_id', 'post_id', name='hive_reblogs_ux1'), # core
sa.Index('hive_reblogs_post_id', 'post_id'),
sa.Index('hive_reblogs_block_num_idx', 'block_num'),
@ -311,6 +306,7 @@ def build_metadata():
sa.Table(
'hive_payments',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('block_num', sa.Integer, nullable=False),
sa.Column('tx_idx', SMALLINT, nullable=False),
@ -319,9 +315,9 @@ def build_metadata():
sa.Column('to_account', sa.Integer, nullable=False),
sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False),
sa.Column('token', VARCHAR(5), nullable=False),
sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'),
sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], name='hive_payments_fk2'),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'),
sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1', deferrable=True),
sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], name='hive_payments_fk2', deferrable=True),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3', deferrable=True),
sa.Index('hive_payments_from', 'from_account'),
sa.Index('hive_payments_to', 'to_account'),
sa.Index('hive_payments_post_id', 'post_id'),
@ -330,12 +326,12 @@ def build_metadata():
sa.Table(
'hive_feed_cache',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('post_id', sa.Integer, nullable=False),
sa.Column('account_id', sa.Integer, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('block_num', sa.Integer, nullable=False),
sa.PrimaryKeyConstraint('account_id', 'post_id', name='hive_feed_cache_pk'),
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_feed_cache_fk1'),
sa.Index('hive_feed_cache_block_num_idx', 'block_num'),
sa.Index('hive_feed_cache_created_at_idx', 'created_at'),
sa.Index('hive_feed_cache_post_id_idx', 'post_id'),
@ -347,13 +343,17 @@ def build_metadata():
sa.Table(
'hive_state',
metadata,
sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False),
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('last_imported_block_num', sa.Integer, nullable=False),
sa.Column('last_imported_block_date', sa.DateTime, nullable=False),
sa.Column('last_completed_block_num', sa.Integer, nullable=False),
sa.Column('db_version', sa.Integer, nullable=False),
)
sa.Table(
'hive_posts_api_helper',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
sa.Column(
'author_s_permlink', VARCHAR(275, collation='C'), nullable=False
@ -364,22 +364,23 @@ def build_metadata():
sa.Table(
'hive_mentions',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('post_id', sa.Integer, nullable=False),
sa.Column('account_id', sa.Integer, nullable=False),
sa.Column('block_num', sa.Integer, nullable=False),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1'),
sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2'),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1', deferrable=True),
sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2', deferrable=True),
sa.Index('hive_mentions_account_id_idx', 'account_id'),
sa.UniqueConstraint('post_id', 'account_id', 'block_num', name='hive_mentions_ux1'),
)
metadata = build_metadata_community(metadata)
metadata = build_metadata_community(hive_rowid_seq, metadata)
return metadata
def build_metadata_community(metadata=None):
def build_metadata_community(hive_rowid_seq: sa.Sequence, metadata=None):
"""Build community schema defs"""
if not metadata:
metadata = sa.MetaData()
@ -387,6 +388,7 @@ def build_metadata_community(metadata=None):
sa.Table(
'hive_communities',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
sa.Column('type_id', SMALLINT, nullable=False),
sa.Column('lang', CHAR(2), nullable=False, server_default='en'),
@ -415,6 +417,7 @@ def build_metadata_community(metadata=None):
sa.Table(
'hive_roles',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('account_id', sa.Integer, nullable=False),
sa.Column('community_id', sa.Integer, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
@ -427,6 +430,7 @@ def build_metadata_community(metadata=None):
sa.Table(
'hive_subscriptions',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('account_id', sa.Integer, nullable=False),
sa.Column('community_id', sa.Integer, nullable=False),
@ -440,6 +444,7 @@ def build_metadata_community(metadata=None):
sa.Table(
'hive_notifs',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('block_num', sa.Integer, nullable=False),
sa.Column('type_id', SMALLINT, nullable=False),
@ -480,6 +485,7 @@ def build_metadata_community(metadata=None):
sa.Table(
'hive_notification_cache',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.BigInteger, primary_key=True),
sa.Column('block_num', sa.Integer, nullable=False),
sa.Column('type_id', sa.Integer, nullable=False),
@ -508,21 +514,19 @@ def drop_fk(db):
db.query_no_return("START TRANSACTION")
for table in build_metadata().sorted_tables:
for fk in table.foreign_keys:
sql = f"""ALTER TABLE {table.name} DROP CONSTRAINT IF EXISTS {fk.name}"""
sql = f"""ALTER TABLE {SCHEMA_NAME}.{table.name} DROP CONSTRAINT IF EXISTS {fk.name}"""
db.query_no_return(sql)
db.query_no_return("COMMIT")
def create_fk(db):
from sqlalchemy.schema import AddConstraint
from sqlalchemy import text
connection = db.get_new_connection('create_fk')
connection.execute(text("START TRANSACTION"))
db.query_no_return("START TRANSACTION")
for table in build_metadata().sorted_tables:
for fk in table.foreign_keys:
connection.execute(AddConstraint(fk.constraint))
connection.execute(text("COMMIT"))
db.query_no_return(AddConstraint(fk.constraint), is_prepared=True)
db.query_no_return("COMMIT")
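create_fk now feeds the SQLAlchemy DDL construct straight to the db helper instead of a raw connection; is_prepared=True is project-specific (presumably marking the statement as already executable as-is). The statement a single AddConstraint produces can be previewed standalone (illustrative tables, not the project schema):

    import sqlalchemy as sa
    from sqlalchemy.schema import AddConstraint
    from sqlalchemy.dialects import postgresql

    metadata = sa.MetaData(schema='hivemind_app')
    parent = sa.Table('parent', metadata, sa.Column('id', sa.Integer, primary_key=True))
    child = sa.Table(
        'child',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('parent_id', sa.Integer),
        sa.ForeignKeyConstraint(['parent_id'], ['parent.id'], name='child_fk1', deferrable=True),
    )

    fk = next(iter(child.foreign_keys))
    # Renders roughly: ALTER TABLE hivemind_app.child ADD CONSTRAINT child_fk1
    #   FOREIGN KEY (parent_id) REFERENCES hivemind_app.parent (id) DEFERRABLE
    print(AddConstraint(fk.constraint).compile(dialect=postgresql.dialect()))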
def setup(db):
@ -531,7 +535,9 @@ def setup(db):
sql = """SELECT * FROM pg_extension WHERE extname='intarray'"""
assert db.query_row(sql), "The database requires the 'intarray' extension to be created"
# initialize schema
# create schema and aux functions
db.query(f'CREATE SCHEMA IF NOT EXISTS {SCHEMA_NAME};')
prepare_app_context(db=db)
build_metadata().create_all(db.engine())
# tune auto vacuum/analyze
@ -540,21 +546,30 @@ def setup(db):
# sets FILLFACTOR:
set_fillfactor(db)
# apply inheritance
for table in build_metadata().sorted_tables:
if table.name in ('hive_db_patch_level',):
continue
sql = f'ALTER TABLE {SCHEMA_NAME}.{table.name} INHERIT hive.{SCHEMA_NAME};'
db.query(sql)
context_detach(db=db)
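The INHERIT step hooks every hivemind table under HAF's per-context parent table, which is presumably how HAF discovers and fork-tracks the app's rows (assumption based on HAF naming conventions). The loop boils down to statements like these (SCHEMA_NAME value assumed):

    SCHEMA_NAME = 'hivemind_app'  # assumed value of hive.conf.SCHEMA_NAME
    for table_name in ('hive_posts', 'hive_votes', 'hive_accounts'):
        print(f'ALTER TABLE {SCHEMA_NAME}.{table_name} INHERIT hive.{SCHEMA_NAME};')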
# default rows
sqls = [
"INSERT INTO hive_state (block_num, db_version) VALUES (0, 0)",
"INSERT INTO hive_blocks (num, hash, created_at, completed) VALUES (0, '0000000000000000000000000000000000000000', '2016-03-24 16:04:57', true)",
"INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')",
"INSERT INTO hive_category_data (id, category) VALUES (0, '')",
"INSERT INTO hive_tag_data (id, tag) VALUES (0, '')",
"INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')",
"INSERT INTO hive_accounts (name, created_at) VALUES ('miners', '2016-03-24 16:05:00')",
"INSERT INTO hive_accounts (name, created_at) VALUES ('null', '2016-03-24 16:05:00')",
"INSERT INTO hive_accounts (name, created_at) VALUES ('temp', '2016-03-24 16:05:00')",
"INSERT INTO hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')",
"""
f"INSERT INTO {SCHEMA_NAME}.hive_state (last_imported_block_num, last_imported_block_date, last_completed_block_num, db_version) VALUES (1, '1970-01-01T00:00:00', 1, 0)",
f"INSERT INTO {SCHEMA_NAME}.hive_permlink_data (id, permlink) VALUES (0, '')",
f"INSERT INTO {SCHEMA_NAME}.hive_category_data (id, category) VALUES (0, '')",
f"INSERT INTO {SCHEMA_NAME}.hive_tag_data (id, tag) VALUES (0, '')",
f"INSERT INTO {SCHEMA_NAME}.hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')",
f"INSERT INTO {SCHEMA_NAME}.hive_accounts (name, created_at) VALUES ('miners', '2016-03-24 16:05:00')",
f"INSERT INTO {SCHEMA_NAME}.hive_accounts (name, created_at) VALUES ('null', '2016-03-24 16:05:00')",
f"INSERT INTO {SCHEMA_NAME}.hive_accounts (name, created_at) VALUES ('temp', '2016-03-24 16:05:00')",
f"INSERT INTO {SCHEMA_NAME}.hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')",
f"""
INSERT INTO
public.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id,
{SCHEMA_NAME}.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id,
community_id, created_at, depth, block_num, block_num_created
)
VALUES
@ -564,7 +579,7 @@ def setup(db):
for sql in sqls:
db.query(sql)
sql = "CREATE INDEX hive_communities_ft1 ON hive_communities USING GIN (to_tsvector('english', title || ' ' || about))"
sql = f"CREATE INDEX hive_communities_ft1 ON {SCHEMA_NAME}.hive_communities USING GIN (to_tsvector('english', title || ' ' || about))"
db.query(sql)
# find_comment_id definition moved to utility_functions.sql
@ -595,27 +610,8 @@ def setup(db):
# database_api_vote, find_votes, list_votes_by_voter_comment, list_votes_by_comment_voter moved into database_api_list_votes.sql
sql = """
DO $$
DECLARE
__version INT;
BEGIN
SELECT CURRENT_SETTING('server_version_num')::INT INTO __version;
EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16';
EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16';
IF __version >= 120000 THEN
RAISE NOTICE 'Disabling a JIT optimization on the current database level...';
EXECUTE 'ALTER DATABASE '||current_database()||' SET jit TO False';
END IF;
END
$$;
"""
db.query_no_return(sql)
sql = """
CREATE TABLE IF NOT EXISTS hive_db_patch_level
sql = f"""
CREATE TABLE IF NOT EXISTS {SCHEMA_NAME}.hive_db_patch_level
(
level SERIAL NOT NULL PRIMARY KEY,
patch_date timestamp without time zone NOT NULL,
@ -695,26 +691,26 @@ def setup(db):
"is_superuser.sql",
"update_hive_blocks_consistency_flag.sql",
"update_table_statistics.sql",
"upgrade/update_db_patchlevel.sql",
# Additionally execute db patchlevel import to mark (already done) upgrade changes and avoid its reevaluation during next upgrade.
"upgrade/update_db_patchlevel.sql", # Additionally execute db patchlevel import to mark (already done) upgrade changes and avoid its reevaluation during next upgrade.
"hafapp_api.sql",
]
from os.path import dirname, realpath
dir_path = dirname(realpath(__file__))
sql_scripts_dir_path = Path(__file__).parent / 'sql_scripts'
for script in sql_scripts:
execute_sql_script(db.query_no_return, f"{dir_path}/sql_scripts/{script}")
execute_sql_script(db.query_no_return, sql_scripts_dir_path / script)
# Move this part here, to mark latest db patch level as current Hivemind revision (which just created schema).
sql = """
INSERT INTO hive_db_patch_level
sql = f"""
INSERT INTO {SCHEMA_NAME}.hive_db_patch_level
(patch_date, patched_to_revision)
values
(now(), '{}');
(now(), '{{}}');
"""
from hive.version import GIT_REVISION
db.query_no_return(sql.format(GIT_REVISION))
context_attach(db=db, block_number=0)
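setup() is bracketed by context_detach/context_attach: while detached, HAF stops tracking changes so the bulk DDL and seed rows are cheap, and attaching at block 0 restarts tracking from genesis (interpretation; the semantics are not spelled out in this diff). The bracket generalizes to a pattern like:

    def with_app_context_detached(db, ddl_work):
        # Hedged sketch of the pattern used by setup(): suspend HAF change
        # tracking around raw DDL, then resume from block 0 once the schema
        # is consistent.
        context_detach(db=db)
        try:
            ddl_work(db)
        finally:
            context_attach(db=db, block_number=0)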
def reset_autovac(db):
@ -728,17 +724,18 @@ def reset_autovac(db):
'hive_posts': (2500, 10000),
'hive_follows': (5000, 5000),
'hive_feed_cache': (5000, 5000),
'hive_blocks': (5000, 25000),
'hive_reblogs': (5000, 5000),
'hive_payments': (5000, 5000),
}
for table, (n_vacuum, n_analyze) in autovac_config.items():
sql = """ALTER TABLE %s SET (autovacuum_vacuum_scale_factor = 0,
autovacuum_vacuum_threshold = %s,
autovacuum_analyze_scale_factor = 0,
autovacuum_analyze_threshold = %s)"""
db.query(sql % (table, n_vacuum, n_analyze))
sql = f"""
ALTER TABLE {SCHEMA_NAME}.{table} SET (autovacuum_vacuum_scale_factor = 0,
autovacuum_vacuum_threshold = {n_vacuum},
autovacuum_analyze_scale_factor = 0,
autovacuum_analyze_threshold = {n_analyze});
"""
db.query(sql)
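Zeroing the scale factors turns the vacuum/analyze trigger into a flat row count instead of a fraction of the table, so huge tables no longer accumulate millions of dead rows before autovacuum fires. Worked numbers (the reltuples value is illustrative):

    # PostgreSQL fires autovacuum when:
    #   dead_tuples > autovacuum_vacuum_threshold
    #                 + autovacuum_vacuum_scale_factor * reltuples
    reltuples = 50_000_000                  # illustrative table size
    vacuum_after = 2500 + 0 * reltuples     # hive_posts: 2_500 dead tuples
    analyze_after = 10000 + 0 * reltuples   # hive_posts: 10_000 changed tuples
    # With the default scale factor of 0.2, the same table would wait for
    # 10_000_000 dead tuples before vacuuming.
    print(vacuum_after, analyze_after)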
def set_fillfactor(db):
@ -747,8 +744,8 @@ def set_fillfactor(db):
fillfactor_config = {'hive_posts': 70, 'hive_post_data': 70, 'hive_votes': 70, 'hive_reputation_data': 50}
for table, fillfactor in fillfactor_config.items():
sql = """ALTER TABLE {} SET (FILLFACTOR = {})"""
db.query(sql.format(table, fillfactor))
sql = f"ALTER TABLE {SCHEMA_NAME}.{table} SET (FILLFACTOR = {fillfactor});"
db.query(sql)
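A FILLFACTOR below 100 leaves free space in each heap page, presumably so frequently updated rows (posts, votes, reputation) can be rewritten in place as HOT updates without touching indexes — the rationale is assumed, since this hunk only reworks the statement formatting. In effect:

    fillfactor_config = {'hive_posts': 70, 'hive_post_data': 70,
                         'hive_votes': 70, 'hive_reputation_data': 50}
    for table, ff in fillfactor_config.items():
        # e.g. hive_posts: pages packed to 70%, 30% reserved for updates
        print(f"{table}: packed to {ff}%, {100 - ff}% headroom per page")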
def set_logged_table_attribute(db, logged):

View File

@ -1,13 +1,13 @@
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_blog;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_account_posts_by_blog;
CREATE OR REPLACE FUNCTION bridge_get_account_posts_by_blog(
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_account_posts_by_blog(
in _account VARCHAR,
in _author VARCHAR,
in _permlink VARCHAR,
in _limit INTEGER,
in _bridge_api BOOLEAN
)
RETURNS SETOF bridge_api_post
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -15,11 +15,11 @@ DECLARE
__account_id INTEGER;
__created_at TIMESTAMP;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hfc.created_at INTO __created_at
FROM hive_feed_cache hfc
FROM hivemind_app.hive_feed_cache hfc
WHERE hfc.account_id = __account_id AND hfc.post_id = __post_id;
END IF;
@ -30,14 +30,14 @@ BEGIN
SELECT
hfc.post_id,
hfc.created_at
FROM hive_feed_cache hfc
FROM hivemind_app.hive_feed_cache hfc
WHERE hfc.account_id = __account_id
AND ( __post_id = 0 OR hfc.created_at < __created_at
OR (hfc.created_at = __created_at AND hfc.post_id < __post_id) )
AND ( NOT _bridge_api OR
NOT EXISTS (SELECT NULL FROM live_posts_comments_view hp1 --should this just be live_posts_view?
NOT EXISTS (SELECT NULL FROM hivemind_app.live_posts_comments_view hp1 --should this just be live_posts_view?
WHERE hp1.id = hfc.post_id AND hp1.community_id IS NOT NULL
AND NOT EXISTS (SELECT NULL FROM hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp1.id)
AND NOT EXISTS (SELECT NULL FROM hivemind_app.hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp1.id)
)
)
ORDER BY hfc.created_at DESC, hfc.post_id DESC
@ -83,7 +83,7 @@ BEGIN
hp.is_muted,
NULL
FROM blog,
LATERAL get_post_view_by_id(blog.post_id) hp
LATERAL hivemind_app.get_post_view_by_id(blog.post_id) hp
ORDER BY blog.created_at DESC, blog.post_id DESC
LIMIT _limit;
END

View File

@ -1,20 +1,20 @@
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_comments;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_account_posts_by_comments;
CREATE FUNCTION bridge_get_account_posts_by_comments( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
CREATE FUNCTION hivemind_app.bridge_get_account_posts_by_comments( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY
WITH ds AS MATERIALIZED --bridge_get_account_posts_by_comments
(
SELECT hp1.id
FROM live_comments_view hp1
FROM hivemind_app.live_comments_view hp1
WHERE hp1.author_id = __account_id
AND (__post_id = 0 OR hp1.id < __post_id)
ORDER BY hp1.id DESC
@ -60,7 +60,7 @@ BEGIN
hp.is_muted,
NULL
FROM ds,
LATERAL get_post_view_by_id(ds.id) hp
LATERAL hivemind_app.get_post_view_by_id(ds.id) hp
ORDER BY ds.id DESC
LIMIT _limit;
END

View File

@ -1,18 +1,18 @@
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_payout;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_account_posts_by_payout;
CREATE FUNCTION bridge_get_account_posts_by_payout( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
CREATE FUNCTION hivemind_app.bridge_get_account_posts_by_payout( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH payouts AS MATERIALIZED -- bridge_get_account_posts_by_payout
@ -20,7 +20,7 @@ BEGIN
SELECT
id,
(hp.payout + hp.pending_payout) as total_payout
FROM live_posts_comments_view hp
FROM hivemind_app.live_posts_comments_view hp
WHERE
hp.author_id = __account_id
AND NOT hp.is_paidout
@ -69,7 +69,7 @@ BEGIN
hp.is_muted,
NULL
FROM payouts,
LATERAL get_post_view_by_id(payouts.id) hp
LATERAL hivemind_app.get_post_view_by_id(payouts.id) hp
ORDER BY payouts.total_payout DESC, payouts.id DESC
LIMIT _limit;
END

View File

@ -1,20 +1,20 @@
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_posts;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_account_posts_by_posts;
CREATE FUNCTION bridge_get_account_posts_by_posts( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
CREATE FUNCTION hivemind_app.bridge_get_account_posts_by_posts( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY
WITH posts AS MATERIALIZED -- bridge_get_account_posts_by_posts
(
SELECT id
FROM live_posts_view hp
FROM hivemind_app.live_posts_view hp
WHERE
hp.author_id = __account_id
AND ( __post_id = 0 OR hp.id < __post_id )
@ -61,7 +61,7 @@ BEGIN
hp.is_muted,
NULL
FROM posts,
LATERAL get_post_view_by_id(posts.id) hp
LATERAL hivemind_app.get_post_view_by_id(posts.id) hp
ORDER BY posts.id DESC
LIMIT _limit;
END

View File

@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION bridge_get_account_posts_by_replies(_account VARCHAR, _author VARCHAR, _permlink VARCHAR, _limit SMALLINT, _bridge_api BOOLEAN) RETURNS SETOF bridge_api_post
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_account_posts_by_replies(_account VARCHAR, _author VARCHAR, _permlink VARCHAR, _limit SMALLINT, _bridge_api BOOLEAN) RETURNS SETOF hivemind_app.bridge_api_post
AS $function$
DECLARE
__account_id INT;
@ -6,22 +6,22 @@ DECLARE
BEGIN
IF NOT _bridge_api AND _permlink <> '' THEN
-- find blogger account using parent author of page defining post
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
SELECT pp.author_id INTO __account_id
FROM hive_posts hp
JOIN hive_posts pp ON hp.parent_id = pp.id
FROM hivemind_app.hive_posts hp
JOIN hivemind_app.hive_posts pp ON hp.parent_id = pp.id
WHERE hp.id = __post_id;
IF __account_id = 0 THEN __account_id = NULL; END IF;
ELSE
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
END IF;
RETURN QUERY
WITH replies AS MATERIALIZED --bridge_get_account_posts_by_replies
(
SELECT hpr.id
FROM live_posts_comments_view hpr
JOIN hive_posts hp1 ON hp1.id = hpr.parent_id
FROM hivemind_app.live_posts_comments_view hpr
JOIN hivemind_app.hive_posts hp1 ON hp1.id = hpr.parent_id
WHERE hp1.author_id = __account_id
AND (__post_id = 0 OR hpr.id < __post_id )
ORDER BY hpr.id + 1 DESC
@ -67,7 +67,7 @@ BEGIN
hp.is_muted,
NULL
FROM replies,
LATERAL get_post_view_by_id(replies.id) hp
LATERAL hivemind_app.get_post_view_by_id(replies.id) hp
ORDER BY replies.id DESC
LIMIT _limit;
END

View File

@ -1,7 +1,7 @@
DROP FUNCTION IF EXISTS bridge_get_by_feed_with_reblog;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_by_feed_with_reblog;
CREATE OR REPLACE FUNCTION bridge_get_by_feed_with_reblog( IN _account VARCHAR, IN _author VARCHAR, IN _permlink VARCHAR, IN _limit INTEGER)
RETURNS SETOF bridge_api_post_reblogs
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_by_feed_with_reblog( IN _account VARCHAR, IN _author VARCHAR, IN _permlink VARCHAR, IN _limit INTEGER)
RETURNS SETOF hivemind_app.bridge_api_post_reblogs
LANGUAGE 'plpgsql'
STABLE
ROWS 1000
@ -12,16 +12,16 @@ DECLARE
__account_id INT;
__min_date TIMESTAMP;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _account, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT MIN(hfc.created_at) INTO __min_date
FROM hive_feed_cache hfc
JOIN hive_follows hf ON hfc.account_id = hf.following
FROM hivemind_app.hive_feed_cache hfc
JOIN hivemind_app.hive_follows hf ON hfc.account_id = hf.following
WHERE hf.state = 1 AND hf.follower = __account_id AND hfc.post_id = __post_id;
END IF;
__cutoff = block_before_head( '1 month' );
__cutoff = hivemind_app.block_before_head( '1 month' );
RETURN QUERY
WITH feed AS MATERIALIZED -- bridge_get_by_feed_with_reblog
@ -30,9 +30,9 @@ BEGIN
hfc.post_id,
MIN(hfc.created_at) as min_created,
array_agg(ha.name) AS reblogged_by
FROM hive_feed_cache hfc
JOIN hive_follows hf ON hfc.account_id = hf.following
JOIN hive_accounts ha ON ha.id = hf.following
FROM hivemind_app.hive_feed_cache hfc
JOIN hivemind_app.hive_follows hf ON hfc.account_id = hf.following
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE hfc.block_num > __cutoff AND hf.state = 1 AND hf.follower = __account_id
GROUP BY hfc.post_id
HAVING __post_id = 0 OR MIN(hfc.created_at) < __min_date OR ( MIN(hfc.created_at) = __min_date AND hfc.post_id < __post_id )
@ -79,7 +79,7 @@ BEGIN
hp.is_muted,
feed.reblogged_by
FROM feed,
LATERAL get_post_view_by_id(feed.post_id) hp
LATERAL hivemind_app.get_post_view_by_id(feed.post_id) hp
ORDER BY feed.min_created DESC, feed.post_id DESC
LIMIT _limit;
END

View File

@ -1,5 +1,5 @@
DROP TYPE IF EXISTS bridge_api_community CASCADE;
CREATE TYPE bridge_api_community AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_community CASCADE;
CREATE TYPE hivemind_app.bridge_api_community AS (
id INTEGER,
name VARCHAR(16),
title VARCHAR(32),
@ -20,24 +20,24 @@ CREATE TYPE bridge_api_community AS (
team JSON
);
DROP FUNCTION IF EXISTS bridge_get_community
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_community
;
CREATE OR REPLACE FUNCTION bridge_get_community(
in _name hive_communities.name%TYPE,
in _observer hive_accounts.name%TYPE
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_community(
in _name hivemind_app.hive_communities.name%TYPE,
in _observer hivemind_app.hive_accounts.name%TYPE
)
RETURNS SETOF bridge_api_community
RETURNS SETOF hivemind_app.bridge_api_community
LANGUAGE plpgsql
AS
$function$
DECLARE
__observer_id INT;
__community_id INT := find_community_id( _name, True );
__community_id INT := hivemind_app.find_community_id( _name, True );
__context JSON := '{}'::json;
BEGIN
IF _observer <> '' THEN
__observer_id = find_account_id( _observer, True );
__context= bridge_get_community_context(_observer, _name);
__observer_id = hivemind_app.find_account_id( _observer, True );
__context= hivemind_app.bridge_get_community_context(_observer, _name);
END IF;
RETURN QUERY SELECT
@ -58,13 +58,13 @@ BEGIN
hc.flag_text,
hc.settings::JSON,
__context,
(SELECT json_agg(json_build_array(a.name, get_role_name(r.role_id), r.title) ORDER BY r.role_id DESC, r.account_id DESC)
FROM hive_roles r
JOIN hive_accounts a ON r.account_id = a.id
(SELECT json_agg(json_build_array(a.name, hivemind_app.get_role_name(r.role_id), r.title) ORDER BY r.role_id DESC, r.account_id DESC)
FROM hivemind_app.hive_roles r
JOIN hivemind_app.hive_accounts a ON r.account_id = a.id
WHERE r.community_id = __community_id
AND r.role_id BETWEEN 4 AND 8
)
FROM hive_communities hc
FROM hivemind_app.hive_communities hc
WHERE hc.id = __community_id
GROUP BY hc.id
;

View File

@ -1,23 +1,23 @@
DROP TYPE IF EXISTS bridge_api_community_context CASCADE;
CREATE TYPE bridge_api_community_context AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_community_context CASCADE;
CREATE TYPE hivemind_app.bridge_api_community_context AS (
role_id SMALLINT,
title VARCHAR,
subscribed BOOLEAN
);
DROP FUNCTION IF EXISTS bridge_get_community_context
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_community_context
;
CREATE OR REPLACE FUNCTION bridge_get_community_context(
in _account hive_accounts.name%TYPE,
in _name hive_communities.name%TYPE
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_community_context(
in _account hivemind_app.hive_accounts.name%TYPE,
in _name hivemind_app.hive_communities.name%TYPE
)
RETURNS SETOF JSON
LANGUAGE plpgsql
AS
$function$
DECLARE
__account_id INT := find_account_id( _account, True );
__community_id INT := find_community_id( _name, True );
__account_id INT := hivemind_app.find_account_id( _account, True );
__community_id INT := hivemind_app.find_community_id( _name, True );
__subscribed BOOLEAN;
BEGIN
@ -26,22 +26,22 @@ BEGIN
RETURN;
END IF;
__subscribed = EXISTS(SELECT 1 FROM hive_subscriptions WHERE account_id = __account_id AND community_id = __community_id);
__subscribed = EXISTS(SELECT 1 FROM hivemind_app.hive_subscriptions WHERE account_id = __account_id AND community_id = __community_id);
RETURN QUERY SELECT
json_build_object(
'role', get_role_name(role_id),
'role', hivemind_app.get_role_name(role_id),
'subscribed', __subscribed,
'title', title
)
FROM hive_roles
FROM hivemind_app.hive_roles
WHERE account_id = __account_id
AND community_id = __community_id
;
IF NOT FOUND THEN
RETURN QUERY SELECT json_build_object(
'role', get_role_name(0),
'role', hivemind_app.get_role_name(0),
'subscribed', __subscribed,
'title', ''
);

View File

@ -1,13 +1,13 @@
DROP FUNCTION IF EXISTS bridge_get_discussion;
CREATE OR REPLACE FUNCTION public.bridge_get_discussion(_author character varying, _permlink character varying, _observer character varying)
RETURNS SETOF bridge_api_post_discussion
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_discussion;
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_discussion(_author character varying, _permlink character varying, _observer character varying)
RETURNS SETOF hivemind_app.bridge_api_post_discussion
AS $function$
DECLARE
__post_id INT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
SELECT -- bridge_get_discussion
hpv.id,
@ -53,23 +53,23 @@ BEGIN
(
WITH RECURSIVE child_posts (id, parent_id) AS MATERIALIZED
(
SELECT hp.id, hp.parent_id, blacklisted_by_observer_view.source as source
FROM live_posts_comments_view hp left outer join blacklisted_by_observer_view on (blacklisted_by_observer_view.observer_id = __observer_id AND blacklisted_by_observer_view.blacklisted_id = hp.author_id)
SELECT hp.id, hp.parent_id, hivemind_app.blacklisted_by_observer_view.source as source
FROM hivemind_app.live_posts_comments_view hp left outer join hivemind_app.blacklisted_by_observer_view on (hivemind_app.blacklisted_by_observer_view.observer_id = __observer_id AND hivemind_app.blacklisted_by_observer_view.blacklisted_id = hp.author_id)
WHERE hp.id = __post_id
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp.author_id))
UNION ALL
SELECT children.id, children.parent_id, blacklisted_by_observer_view.source as source
FROM live_posts_comments_view children left outer join blacklisted_by_observer_view on (blacklisted_by_observer_view.observer_id = __observer_id AND blacklisted_by_observer_view.blacklisted_id = children.author_id)
SELECT children.id, children.parent_id, hivemind_app.blacklisted_by_observer_view.source as source
FROM hivemind_app.live_posts_comments_view children left outer join hivemind_app.blacklisted_by_observer_view on (hivemind_app.blacklisted_by_observer_view.observer_id = __observer_id AND hivemind_app.blacklisted_by_observer_view.blacklisted_id = children.author_id)
JOIN child_posts ON children.parent_id = child_posts.id
JOIN hive_accounts ON children.author_id = hive_accounts.id
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = children.author_id))
JOIN hivemind_app.hive_accounts ON children.author_id = hivemind_app.hive_accounts.id
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = children.author_id))
)
SELECT hp2.id, cp.source
FROM hive_posts hp2
FROM hivemind_app.hive_posts hp2
JOIN child_posts cp ON cp.id = hp2.id
ORDER BY hp2.id
) ds,
LATERAL get_post_view_by_id(ds.id) hpv
LATERAL hivemind_app.get_post_view_by_id(ds.id) hpv
ORDER BY ds.id
LIMIT 2000;
END

View File

@ -1,13 +1,13 @@
DROP FUNCTION IF EXISTS bridge_get_post;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_post;
CREATE FUNCTION bridge_get_post( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF bridge_api_post
CREATE FUNCTION hivemind_app.bridge_get_post( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY SELECT
hp.id,
hp.author,
@ -47,7 +47,7 @@ BEGIN
hp.curator_payout_value,
hp.is_muted,
NULL
FROM get_post_view_by_id(__post_id) hp;
FROM hivemind_app.get_post_view_by_id(__post_id) hp;
END
$function$
language plpgsql STABLE;

View File

@ -1,26 +1,26 @@
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_created;
CREATE FUNCTION bridge_get_ranked_post_by_created( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_created;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_created( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH created AS MATERIALIZED -- bridge_get_ranked_post_by_created
(
SELECT
hp1.id,
blacklist.source
FROM live_posts_view hp1
JOIN hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT ha.is_grayed
AND ( __post_id = 0 OR hp1.id < __post_id )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.id DESC
LIMIT _limit
)
@ -64,16 +64,16 @@ BEGIN
hp.is_muted,
created.source
FROM created,
LATERAL get_post_view_by_id(created.id) hp
LATERAL hivemind_app.get_post_view_by_id(created.id) hp
ORDER BY created.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_hot;
CREATE FUNCTION bridge_get_ranked_post_by_hot( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_hot;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_hot( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -81,10 +81,10 @@ DECLARE
__hot_limit FLOAT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.sc_hot INTO __hot_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_hot INTO __hot_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH hot AS MATERIALIZED -- bridge_get_ranked_post_by_hot
@ -93,11 +93,11 @@ BEGIN
hp1.id,
hp1.sc_hot,
blacklist.source
FROM live_posts_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_hot < __hot_limit OR ( hp1.sc_hot = __hot_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_hot DESC, hp1.id DESC
LIMIT _limit
)
@ -141,27 +141,27 @@ BEGIN
hp.is_muted,
hot.source
FROM hot,
LATERAL get_post_view_by_id(hot.id) hp
LATERAL hivemind_app.get_post_view_by_id(hot.id) hp
ORDER BY hot.sc_hot DESC, hot.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_muted;
CREATE FUNCTION bridge_get_ranked_post_by_muted( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_muted;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_muted( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id(_observer, True);
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id(_observer, True);
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_muted
@ -170,9 +170,9 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
JOIN hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND ha.is_grayed
AND (hp1.payout + hp1.pending_payout) > 0
@ -220,27 +220,27 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_comments;
CREATE FUNCTION bridge_get_ranked_post_by_payout_comments( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_comments;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_comments( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_payout_comments
@ -249,12 +249,12 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_comments_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_comments_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
OR ((hp1.payout + hp1.pending_payout) = __payout_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -298,30 +298,30 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout;
CREATE FUNCTION bridge_get_ranked_post_by_payout( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__head_block_time TIMESTAMP;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__head_block_time = head_block_time();
__head_block_time = hivemind_app.head_block_time();
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_payout
(
@ -329,12 +329,12 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND ( ( NOT _bridge_api AND hp1.depth = 0 ) OR ( _bridge_api AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours' ) )
AND ( __post_id = 0 OR ( hp1.payout + hp1.pending_payout ) < __payout_limit OR ( ( hp1.payout + hp1.pending_payout ) = __payout_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -378,27 +378,27 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_promoted;
CREATE FUNCTION bridge_get_ranked_post_by_promoted( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_promoted;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_promoted( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__promoted_limit hive_posts.promoted%TYPE;
__promoted_limit hivemind_app.hive_posts.promoted%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.promoted INTO __promoted_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.promoted INTO __promoted_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH promoted AS MATERIALIZED -- bridge_get_ranked_post_by_promoted
@ -407,12 +407,12 @@ BEGIN
hp1.id,
hp1.promoted,
blacklist.source
FROM live_posts_comments_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND hp1.promoted > 0
AND ( __post_id = 0 OR hp1.promoted < __promoted_limit OR ( hp1.promoted = __promoted_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.promoted DESC, hp1.id DESC
LIMIT _limit
)
@ -456,16 +456,16 @@ BEGIN
hp.is_muted,
promoted.source
FROM promoted,
LATERAL get_post_view_by_id(promoted.id) hp
LATERAL hivemind_app.get_post_view_by_id(promoted.id) hp
ORDER BY promoted.promoted DESC, promoted.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_trends;
CREATE FUNCTION bridge_get_ranked_post_by_trends( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_trends;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_trends( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -473,10 +473,10 @@ DECLARE
__trending_limit FLOAT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.sc_trend INTO __trending_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_trend INTO __trending_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH trends AS MATERIALIZED -- bridge_get_ranked_post_by_trends
@ -486,11 +486,11 @@ BEGIN
hp1.sc_trend as trend,
blacklist.source
FROM
live_posts_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
hivemind_app.live_posts_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_trend < __trending_limit OR ( hp1.sc_trend = __trending_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_trend DESC, hp1.id DESC
LIMIT _limit
)
@ -534,7 +534,7 @@ BEGIN
hp.is_muted,
trends.source
FROM trends,
LATERAL get_post_view_by_id(trends.id) hp
LATERAL hivemind_app.get_post_view_by_id(trends.id) hp
ORDER BY trends.trend DESC, trends.id DESC
LIMIT _limit;
END

View File

@ -1,15 +1,15 @@
DROP FUNCTION IF EXISTS bridge_get_ranked_post_pinned_for_community;
CREATE FUNCTION bridge_get_ranked_post_pinned_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR)
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_pinned_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_pinned_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR)
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__observer_id INT;
__post_id INT;
BEGIN
__observer_id = find_account_id( _observer, True );
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 AND NOT is_pinned( __post_id ) THEN
__observer_id = hivemind_app.find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 AND NOT hivemind_app.is_pinned( __post_id ) THEN
RETURN;
END IF;
RETURN QUERY
@ -18,12 +18,12 @@ BEGIN
SELECT
hp.id,
blacklist.source
FROM live_posts_comments_view hp -- is this really supported for comments (maybe pinning is prevented?)?
JOIN hive_communities hc ON hc.id = hp.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp.author_id)
FROM hivemind_app.live_posts_comments_view hp -- is this really supported for comments (maybe pinning is prevented?)?
JOIN hivemind_app.hive_communities hc ON hc.id = hp.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp.author_id)
WHERE hc.name = _community AND hp.is_pinned
AND (__post_id = 0 OR hp.id < __post_id)
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp.author_id))
ORDER BY hp.id DESC
LIMIT _limit
)
@ -67,16 +67,16 @@ BEGIN
hp.is_muted,
pinned.source
FROM pinned,
LATERAL get_post_view_by_id(pinned.id) hp
LATERAL hivemind_app.get_post_view_by_id(pinned.id) hp
ORDER BY hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_trends_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_trends_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_trends_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_trends_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -84,10 +84,10 @@ DECLARE
__trending_limit FLOAT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
IF __post_id <> 0 AND NOT is_pinned( __post_id ) THEN
SELECT hp.sc_trend INTO __trending_limit FROM hive_posts hp WHERE hp.id = __post_id;
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 AND NOT hivemind_app.is_pinned( __post_id ) THEN
SELECT hp.sc_trend INTO __trending_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
ELSE
__post_id = 0;
END IF;
@ -99,14 +99,14 @@ BEGIN
hp1.id,
hp1.sc_trend as trend,
blacklist.source
FROM live_posts_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND NOT hp1.is_paidout
AND ( NOT _bridge_api OR NOT hp1.is_pinned ) -- concatenated with bridge_get_ranked_post_pinned_for_community when called for bridge_api
AND ( __post_id = 0 OR hp1.sc_trend < __trending_limit OR ( hp1.sc_trend = __trending_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_trend DESC, hp1.id DESC
LIMIT _limit
)
@ -150,27 +150,27 @@ SELECT
hp.is_muted,
trends.source
FROM trends,
LATERAL get_post_view_by_id(trends.id) hp
LATERAL hivemind_app.get_post_view_by_id(trends.id) hp
ORDER BY trends.trend DESC, trends.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_promoted_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_promoted_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_promoted_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_promoted_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__promoted_limit hive_posts.promoted%TYPE;
__promoted_limit hivemind_app.hive_posts.promoted%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.promoted INTO __promoted_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.promoted INTO __promoted_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH promoted as MATERIALIZED -- bridge_get_ranked_post_by_promoted_for_community
@ -179,14 +179,14 @@ BEGIN
hp1.id,
hp1.promoted as promoted,
blacklist.source
FROM live_posts_comments_view hp1 -- maybe this should be live_posts_view?
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1 -- maybe this should be live_posts_view?
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND hp1.promoted > 0
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.promoted < __promoted_limit OR ( hp1.promoted = __promoted_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.promoted DESC, hp1.id DESC
LIMIT _limit
)
@ -230,30 +230,30 @@ BEGIN
hp.is_muted,
promoted.source
FROM promoted,
LATERAL get_post_view_by_id(promoted.id) hp
LATERAL hivemind_app.get_post_view_by_id(promoted.id) hp
ORDER BY promoted.promoted DESC, promoted.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
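Each function also follows a two-phase plan: a CTE forced with AS MATERIALIZED collects only the candidate ids (keeping the LIMIT-ed scan cheap and preventing the PostgreSQL 12+ planner from inlining it into the outer query), then a LATERAL call to get_post_view_by_id hydrates the full rows. A reduced sketch of that shape:

WITH ids AS MATERIALIZED
(
  SELECT hp1.id
  FROM hivemind_app.live_posts_view hp1
  ORDER BY hp1.promoted DESC, hp1.id DESC
  LIMIT _limit
)
SELECT hp.*
FROM ids,
  LATERAL hivemind_app.get_post_view_by_id(ids.id) hp
ORDER BY hp.promoted DESC, hp.id DESC
LIMIT _limit;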
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_payout_for_community(in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_for_community(in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__head_block_time TIMESTAMP;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__head_block_time = head_block_time();
__head_block_time = hivemind_app.head_block_time();
RETURN QUERY
WITH payout as MATERIALIZED -- bridge_get_ranked_post_by_payout_for_community
(
@ -261,14 +261,14 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND NOT hp1.is_paidout
AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'
AND ( __post_id = 0 OR ( hp1.payout + hp1.pending_payout ) < __payout_limit OR ( ( hp1.payout + hp1.pending_payout ) = __payout_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY ( hp1.payout + hp1.pending_payout ) DESC, hp1.id DESC
LIMIT _limit
)
@ -312,27 +312,27 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
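The payout variants additionally constrain payout_at to a 24-hour-wide window that opens 12 hours after the current head-block time, i.e. posts whose payout falls due between half a day and a day and a half from now. A quick check of the interval arithmetic with an illustrative timestamp:

-- With head_block_time() = 2022-12-20 00:00, the BETWEEN bounds evaluate to:
SELECT TIMESTAMP '2022-12-20 00:00' + INTERVAL '12 hours' AS window_start,  -- 2022-12-20 12:00
       TIMESTAMP '2022-12-20 00:00' + INTERVAL '36 hours' AS window_end;    -- 2022-12-21 12:00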
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_comments_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_payout_comments_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_comments_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_comments_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH payout as MATERIALIZED -- bridge_get_ranked_post_by_payout_comments_for_community
@ -341,14 +341,14 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_comments_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_comments_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
OR ((hp1.payout + hp1.pending_payout) = __payout_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -392,28 +392,28 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_muted_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_muted_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_muted_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_muted_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__observer_id = find_account_id(_observer, True);
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH payout as MATERIALIZED -- bridge_get_ranked_post_by_muted_for_community
(
@ -421,10 +421,10 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
JOIN hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
JOIN hivemind_app.hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND NOT hp1.is_paidout
AND ha.is_grayed
@ -474,16 +474,16 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
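Two observer filters recur in every function above, and they behave differently: muting is an anti-join (a NOT EXISTS against muted_accounts_by_id_view that drops the row), while blacklisting is a LEFT OUTER JOIN that keeps the row and merely decorates it with a source label. A condensed sketch of the combination:

SELECT hp.id, blacklist.source
FROM hivemind_app.live_posts_comments_view hp
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist
  ON blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp.author_id
WHERE NOT EXISTS
(
  SELECT 1 FROM hivemind_app.muted_accounts_by_id_view m
  WHERE m.observer_id = __observer_id AND m.muted_id = hp.author_id
);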
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_hot_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_hot_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_hot_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_hot_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -491,10 +491,10 @@ DECLARE
__hot_limit FLOAT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.sc_hot INTO __hot_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_hot INTO __hot_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY
WITH hot AS MATERIALIZED -- bridge_get_ranked_post_by_hot_for_community
@ -503,13 +503,13 @@ BEGIN
hp1.id,
hp1.sc_hot AS hot,
blacklist.source
FROM live_posts_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_hot < __hot_limit OR (hp1.sc_hot = __hot_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_hot DESC, hp1.id DESC
LIMIT _limit
)
@ -553,25 +553,25 @@ BEGIN
hp.is_muted,
hot.source
FROM hot,
LATERAL get_post_view_by_id(hot.id) hp
LATERAL hivemind_app.get_post_view_by_id(hot.id) hp
ORDER BY hot.hot DESC, hot.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_created_for_community;
CREATE FUNCTION bridge_get_ranked_post_by_created_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_created_for_community;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_created_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id( _observer, True );
IF __post_id <> 0 AND is_pinned( __post_id ) THEN
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 AND hivemind_app.is_pinned( __post_id ) THEN
__post_id = 0;
END IF;
RETURN QUERY
@ -581,13 +581,13 @@ BEGIN
SELECT
hp1.id,
blacklist.source
FROM live_posts_view hp1
JOIN hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_communities hc ON hp1.community_id = hc.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hc.name = _community
AND ( NOT _bridge_api OR NOT hp1.is_pinned ) -- concatenated with bridge_get_ranked_post_pinned_for_community when called for bridge_api
AND ( __post_id = 0 OR hp1.id < __post_id )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.id DESC
LIMIT _limit
)
@ -631,7 +631,7 @@ BEGIN
hp.is_muted,
created.source
FROM created,
LATERAL get_post_view_by_id(created.id) hp
LATERAL hivemind_app.get_post_view_by_id(created.id) hp
ORDER BY created.id DESC
LIMIT _limit;
END

View File

@ -1,14 +1,14 @@
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_created_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_created_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_created_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_created_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__account_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH post_ids AS MATERIALIZED -- bridge_get_ranked_post_by_created_for_observer_communities
(
@ -16,18 +16,18 @@ BEGIN
FROM
(
SELECT community_id
FROM hive_subscriptions
FROM hivemind_app.hive_subscriptions
WHERE account_id = __account_id
) communities
CROSS JOIN LATERAL
(
SELECT hp.id
FROM live_posts_view hp
JOIN hive_accounts on (hp.author_id = hive_accounts.id)
FROM hivemind_app.live_posts_view hp
JOIN hivemind_app.hive_accounts on (hp.author_id = hivemind_app.hive_accounts.id)
WHERE hp.community_id = communities.community_id
AND (__post_id = 0 OR hp.id < __post_id)
AND hive_accounts.reputation > '-464800000000'::bigint
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
AND hivemind_app.hive_accounts.reputation > '-464800000000'::bigint
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
ORDER BY id DESC
LIMIT _limit
) posts
@ -74,16 +74,16 @@ BEGIN
hp.is_muted,
blacklist.source
FROM post_ids,
LATERAL get_post_view_by_id(post_ids.id) hp
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
LATERAL hivemind_app.get_post_view_by_id(post_ids.id) hp
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
ORDER BY post_ids.id DESC;
END
$function$
language plpgsql STABLE;
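Unlike its siblings, bridge_get_ranked_post_by_created_for_observer_communities does not run one global scan: it takes up to _limit newest posts from each subscribed community via CROSS JOIN LATERAL and lets the outer query merge them. The underlying top-N-per-group pattern, sketched:

-- The lateral subquery sees the current community row, so each community
-- contributes its own LIMIT-ed, index-ordered slice.
SELECT posts.id
FROM
(
  SELECT community_id
  FROM hivemind_app.hive_subscriptions
  WHERE account_id = __account_id
) communities
CROSS JOIN LATERAL
(
  SELECT hp.id
  FROM hivemind_app.live_posts_view hp
  WHERE hp.community_id = communities.community_id
  ORDER BY hp.id DESC
  LIMIT _limit
) posts
ORDER BY posts.id DESC
LIMIT _limit;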
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_hot_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_hot_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_hot_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_hot_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -91,24 +91,24 @@ DECLARE
__hot_limit FLOAT;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.sc_hot INTO __hot_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_hot INTO __hot_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH hot AS MATERIALIZED -- bridge_get_ranked_post_by_hot_for_observer_communities
(
SELECT
hp.id,
blacklist.source
FROM live_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
FROM hivemind_app.live_posts_view hp
JOIN hivemind_app.hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
WHERE hs.account_id = __account_id
AND NOT hp.is_paidout
AND ( __post_id = 0 OR hp.sc_hot < __hot_limit OR ( hp.sc_hot = __hot_limit AND hp.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
ORDER BY hp.sc_hot DESC, hp.id DESC
LIMIT _limit
)
@ -152,28 +152,28 @@ BEGIN
hp.is_muted,
hot.source
FROM hot,
LATERAL get_post_view_by_id(hot.id) hp
LATERAL hivemind_app.get_post_view_by_id(hot.id) hp
ORDER BY hp.sc_hot DESC, hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_comments_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_payout_comments_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_comments_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_comments_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_payout_comments_for_observer_communities
(
@ -181,14 +181,14 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_comments_view hp1
JOIN hive_subscriptions hs ON hp1.community_id = hs.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_comments_view hp1
JOIN hivemind_app.hive_subscriptions hs ON hp1.community_id = hs.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hs.account_id = __account_id
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
OR ((hp1.payout + hp1.pending_payout) = __payout_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -232,30 +232,30 @@ SELECT
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_payout_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__head_block_time TIMESTAMP;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT (hp.payout + hp.pending_payout) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__head_block_time = head_block_time();
__account_id = hivemind_app.find_account_id( _observer, True );
__head_block_time = hivemind_app.head_block_time();
RETURN QUERY
WITH payout as MATERIALIZED -- bridge_get_ranked_post_by_payout_for_observer_communities
(
@ -263,15 +263,15 @@ BEGIN
hp.id,
(hp.payout + hp.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
WHERE hs.account_id = __account_id
AND NOT hp.is_paidout
AND hp.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'

AND ( __post_id = 0 OR (hp.payout + hp.pending_payout) < __payout_limit
OR ((hp.payout + hp.pending_payout) = __payout_limit AND hp.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
ORDER BY (hp.payout + hp.pending_payout) DESC, hp.id DESC
LIMIT _limit
)
@ -315,42 +315,42 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
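Note that the %TYPE declarations move along with the tables: __payout_limit hive_posts.payout%TYPE becomes hivemind_app.hive_posts.payout%TYPE, so the variable keeps tracking the column's declared type after the schema change instead of hard-coding it. A minimal PL/pgSQL illustration:

DO $$
DECLARE
  -- Bound to whatever type hivemind_app.hive_posts.payout currently has;
  -- a later ALTER TABLE ... TYPE is picked up without editing this code.
  __payout_limit hivemind_app.hive_posts.payout%TYPE;
BEGIN
  SELECT hp.payout + hp.pending_payout INTO __payout_limit
  FROM hivemind_app.hive_posts hp LIMIT 1;
END
$$;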
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_promoted_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_promoted_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_promoted_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_promoted_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__promoted_limit hive_posts.promoted%TYPE;
__promoted_limit hivemind_app.hive_posts.promoted%TYPE;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.promoted INTO __promoted_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.promoted INTO __promoted_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH promoted AS MATERIALIZED -- bridge_get_ranked_post_by_promoted_for_observer_communities
(
SELECT
hp.id,
blacklist.source
FROM live_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
FROM hivemind_app.live_posts_view hp
JOIN hivemind_app.hive_subscriptions hs ON hp.community_id = hs.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
WHERE hs.account_id = __account_id
AND NOT hp.is_paidout
AND hp.promoted > 0
AND ( __post_id = 0 OR hp.promoted < __promoted_limit OR ( hp.promoted = __promoted_limit AND hp.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp.author_id))
ORDER BY hp.promoted DESC, hp.id DESC
LIMIT _limit
)
@ -394,16 +394,16 @@ BEGIN
hp.is_muted,
promoted.source
FROM promoted,
LATERAL get_post_view_by_id(promoted.id) hp
LATERAL hivemind_app.get_post_view_by_id(promoted.id) hp
ORDER BY hp.promoted DESC, hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_trends_for_observer_communities;
CREATE OR REPLACE FUNCTION bridge_get_ranked_post_by_trends_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_trends_for_observer_communities;
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_ranked_post_by_trends_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -411,12 +411,12 @@ DECLARE
__account_id INT;
__trending_limit FLOAT := 0;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__account_id = find_account_id( _observer, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__account_id = hivemind_app.find_account_id( _observer, True );
IF __post_id <> 0 THEN
SELECT hp.sc_trend INTO __trending_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_trend INTO __trending_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH trending AS MATERIALIZED -- bridge_get_ranked_post_by_trends_for_observer_communities
(
@ -424,13 +424,13 @@ BEGIN
hp1.id,
hp1.sc_trend,
blacklist.source
FROM live_posts_view hp1
JOIN hive_subscriptions hs ON hp1.community_id = hs.community_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_subscriptions hs ON hp1.community_id = hs.community_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hs.account_id = __account_id
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_trend < __trending_limit OR ( hp1.sc_trend = __trending_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __account_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_trend DESC, hp1.id DESC
LIMIT _limit
)
@ -474,28 +474,28 @@ BEGIN
hp.is_muted,
trending.source
FROM trending,
LATERAL get_post_view_by_id(trending.id) hp
LATERAL hivemind_app.get_post_view_by_id(trending.id) hp
ORDER BY trending.sc_trend DESC, trending.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_muted_for_observer_communities;
CREATE FUNCTION bridge_get_ranked_post_by_muted_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_muted_for_observer_communities;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_muted_for_observer_communities( in _observer VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__account_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__account_id = hivemind_app.find_account_id( _observer, True );
RETURN QUERY
WITH muted AS MATERIALIZED -- bridge_get_ranked_post_by_muted_for_observer_communities
(
@ -503,10 +503,10 @@ BEGIN
hp.id,
blacklist.source,
(hp.payout + hp.pending_payout) as total_payout
FROM live_posts_comments_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts_view ha ON ha.id = hp.author_id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hivemind_app.hive_accounts_view ha ON ha.id = hp.author_id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __account_id AND blacklist.blacklisted_id = hp.author_id)
WHERE hs.account_id = __account_id
AND NOT hp.is_paidout
AND ha.is_grayed
@ -556,7 +556,7 @@ BEGIN
hp.is_muted,
muted.source
FROM muted,
LATERAL get_post_view_by_id(muted.id) hp
LATERAL hivemind_app.get_post_view_by_id(muted.id) hp
ORDER BY muted.total_payout DESC, hp.id DESC
LIMIT _limit;
END

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_created_for_tag;
CREATE OR REPLACE FUNCTION bridge_get_ranked_post_by_created_for_tag( _tag VARCHAR, _author VARCHAR, _permlink VARCHAR, _limit SMALLINT, _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_created_for_tag;
CREATE OR REPLACE FUNCTION hivemind_app.bridge_get_ranked_post_by_created_for_tag( _tag VARCHAR, _author VARCHAR, _permlink VARCHAR, _limit SMALLINT, _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -8,21 +8,21 @@ DECLARE
__hive_tag INT[];
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__hive_tag = ARRAY_APPEND( __hive_tag, find_tag_id( _tag, True ));
__observer_id = find_account_id(_observer, True);
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__hive_tag = ARRAY_APPEND( __hive_tag, hivemind_app.find_tag_id( _tag, True ));
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH created AS MATERIALIZED -- bridge_get_ranked_post_by_created_for_tag
(
SELECT
hp1.id,
blacklist.source
FROM live_posts_view hp1
JOIN hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
JOIN hivemind_app.hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
AND ( __post_id = 0 OR hp1.id < __post_id )
AND NOT ha.is_grayed AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND NOT ha.is_grayed AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.id DESC
LIMIT _limit
)
@ -66,16 +66,16 @@ BEGIN
hp.is_muted,
created.source
FROM created,
LATERAL get_post_view_by_id(created.id) hp
LATERAL hivemind_app.get_post_view_by_id(created.id) hp
ORDER BY created.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
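The *_for_tag variants build a one-element integer array via ARRAY_APPEND on an initially NULL array (array_append(NULL, x) yields {x}) and filter with the containment operator tags_ids @> __hive_tag, a form that can use a GIN index on the array column (the index itself is an assumption here; it is not part of this diff). Sketch:

SELECT hp.id
FROM hivemind_app.live_posts_view hp
WHERE hp.tags_ids @> ARRAY[ hivemind_app.find_tag_id( _tag, True ) ]
ORDER BY hp.id DESC
LIMIT _limit;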
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_hot_for_tag;
CREATE FUNCTION bridge_get_ranked_post_by_hot_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_hot_for_tag;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_hot_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -84,12 +84,12 @@ DECLARE
__hive_tag INT[];
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.sc_hot INTO __hot_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_hot INTO __hot_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_tag = ARRAY_APPEND( __hive_tag, find_tag_id( _tag, True ));
__observer_id = find_account_id(_observer, True);
__hive_tag = ARRAY_APPEND( __hive_tag, hivemind_app.find_tag_id( _tag, True ));
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH hot AS MATERIALIZED -- bridge_get_ranked_post_by_hot_for_tag
(
@ -97,12 +97,12 @@ BEGIN
hp1.id,
hp1.sc_hot as hot,
blacklist.source
FROM live_posts_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_hot < __hot_limit OR ( hp1.sc_hot = __hot_limit AND hp1.id < __post_id ) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_hot DESC, hp1.id DESC
LIMIT _limit
)
@ -146,30 +146,30 @@ BEGIN
hp.is_muted,
hot.source
FROM hot,
LATERAL get_post_view_by_id(hot.id) hp
LATERAL hivemind_app.get_post_view_by_id(hot.id) hp
ORDER BY hot.hot DESC, hot.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_muted_for_tag;
CREATE FUNCTION bridge_get_ranked_post_by_muted_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_muted_for_tag;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_muted_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__hive_tag INT[];
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_tag = ARRAY_APPEND( __hive_tag, find_tag_id( _tag, True ) );
__observer_id = find_account_id(_observer, True);
__hive_tag = ARRAY_APPEND( __hive_tag, hivemind_app.find_tag_id( _tag, True ) );
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_muted_for_tag
(
@ -177,10 +177,10 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
JOIN hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_accounts_view ha ON hp1.author_id = ha.id
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
AND NOT hp1.is_paidout
AND ha.is_grayed AND (hp1.payout + hp1.pending_payout) > 0
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
@ -228,30 +228,30 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_comments_for_category;
CREATE FUNCTION bridge_get_ranked_post_by_payout_comments_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_comments_for_category;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_comments_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__hive_category INT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_category = find_category_id( _category, True );
__observer_id = find_account_id(_observer, True);
__hive_category = hivemind_app.find_category_id( _category, True );
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_payout_comments_for_category
(
@ -259,13 +259,13 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_comments_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.category_id = __hive_category
FROM hivemind_app.live_comments_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.category_id = __hive_category
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
OR ((hp1.payout + hp1.pending_payout) = __payout_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -309,32 +309,32 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_for_category;
CREATE FUNCTION bridge_get_ranked_post_by_payout_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_payout_for_category;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_payout_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
__head_block_time TIMESTAMP;
__hive_category INT;
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_category = find_category_id( _category, True );
__head_block_time = head_block_time();
__observer_id = find_account_id(_observer, True);
__hive_category = hivemind_app.find_category_id( _category, True );
__head_block_time = hivemind_app.head_block_time();
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH payout AS MATERIALIZED -- bridge_get_ranked_post_by_payout_for_category
(
@ -342,14 +342,14 @@ BEGIN
hp1.id,
(hp1.payout + hp1.pending_payout) as total_payout,
blacklist.source
FROM live_posts_comments_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.category_id = __hive_category
FROM hivemind_app.live_posts_comments_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.category_id = __hive_category
AND NOT hp1.is_paidout
AND ( ( NOT _bridge_api AND hp1.depth = 0 ) OR ( _bridge_api AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours' ) )
AND ( __post_id = 0 OR (hp1.payout + hp1.pending_payout) < __payout_limit
OR ((hp1.payout + hp1.pending_payout) = __payout_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY (hp1.payout + hp1.pending_payout) DESC, hp1.id DESC
LIMIT _limit
)
@ -393,30 +393,30 @@ BEGIN
hp.is_muted,
payout.source
FROM payout,
LATERAL get_post_view_by_id(payout.id) hp
LATERAL hivemind_app.get_post_view_by_id(payout.id) hp
ORDER BY payout.total_payout DESC, payout.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_promoted_for_tag;
CREATE FUNCTION bridge_get_ranked_post_by_promoted_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_promoted_for_tag;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_promoted_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INT;
__promoted_limit hive_posts.promoted%TYPE;
__promoted_limit hivemind_app.hive_posts.promoted%TYPE;
__hive_tag INT[];
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.promoted INTO __promoted_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.promoted INTO __promoted_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_tag = ARRAY_APPEND( __hive_tag, find_tag_id( _tag, True ) );
__observer_id = find_account_id(_observer, True);
__hive_tag = ARRAY_APPEND( __hive_tag, hivemind_app.find_tag_id( _tag, True ) );
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH promoted AS MATERIALIZED -- bridge_get_ranked_post_by_promoted_for_tag
(
@ -424,14 +424,14 @@ BEGIN
hp1.id,
hp1.promoted,
blacklist.source
FROM live_posts_comments_view hp1 -- maybe should be live_posts_view? no, you can promote replies too (probably no one uses it nowadays anyway)
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
FROM hivemind_app.live_posts_comments_view hp1 -- maybe should be live_posts_view? no, you can promote replies too (probably no one uses it nowadays anyway)
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
AND NOT hp1.is_paidout
AND hp1.promoted > 0
AND ( __post_id = 0 OR hp1.promoted < __promoted_limit
OR (hp1.promoted = __promoted_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.promoted DESC, hp1.id DESC
LIMIT _limit
)
@ -475,16 +475,16 @@ BEGIN
hp.is_muted,
promoted.source
FROM promoted,
LATERAL get_post_view_by_id(promoted.id) hp
LATERAL hivemind_app.get_post_view_by_id(promoted.id) hp
ORDER BY promoted.promoted DESC, promoted.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_trends_for_tag;
CREATE FUNCTION bridge_get_ranked_post_by_trends_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF bridge_api_post
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_ranked_post_by_trends_for_tag;
CREATE FUNCTION hivemind_app.bridge_get_ranked_post_by_trends_for_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _observer VARCHAR )
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
@ -493,12 +493,12 @@ DECLARE
__hive_tag INT[];
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.sc_trend INTO __trending_limit FROM hive_posts hp WHERE hp.id = __post_id;
SELECT hp.sc_trend INTO __trending_limit FROM hivemind_app.hive_posts hp WHERE hp.id = __post_id;
END IF;
__hive_tag = ARRAY_APPEND( __hive_tag, find_tag_id( _tag, True ));
__observer_id = find_account_id(_observer, True);
__hive_tag = ARRAY_APPEND( __hive_tag, hivemind_app.find_tag_id( _tag, True ));
__observer_id = hivemind_app.find_account_id(_observer, True);
RETURN QUERY
WITH trends AS MATERIALIZED -- bridge_get_ranked_post_by_trends_for_tag
(
@ -506,13 +506,13 @@ BEGIN
hp1.id,
hp1.sc_trend as trend,
blacklist.source
FROM live_posts_view hp1
LEFT OUTER JOIN blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
FROM hivemind_app.live_posts_view hp1
LEFT OUTER JOIN hivemind_app.blacklisted_by_observer_view blacklist ON (blacklist.observer_id = __observer_id AND blacklist.blacklisted_id = hp1.author_id)
WHERE hp1.tags_ids @> __hive_tag
AND NOT hp1.is_paidout
AND ( __post_id = 0 OR hp1.sc_trend < __trending_limit
OR (hp1.sc_trend = __trending_limit AND hp1.id < __post_id) )
AND (NOT EXISTS (SELECT 1 FROM muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
AND (NOT EXISTS (SELECT 1 FROM hivemind_app.muted_accounts_by_id_view WHERE observer_id = __observer_id AND muted_id = hp1.author_id))
ORDER BY hp1.sc_trend DESC, hp1.id DESC
LIMIT _limit
)
@ -556,7 +556,7 @@ SELECT
hp.is_muted,
trends.source
FROM trends,
LATERAL get_post_view_by_id(trends.id) hp
LATERAL hivemind_app.get_post_view_by_id(trends.id) hp
ORDER BY trends.trend DESC, trends.id DESC
LIMIT _limit;
END

View File

@ -1,5 +1,5 @@
DROP TYPE IF EXISTS bridge_api_post CASCADE;
CREATE TYPE bridge_api_post AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_post CASCADE;
CREATE TYPE hivemind_app.bridge_api_post AS (
id INTEGER,
author VARCHAR,
parent_author VARCHAR,
@ -40,9 +40,9 @@ CREATE TYPE bridge_api_post AS (
blacklists TEXT
);
DROP TYPE IF EXISTS bridge_api_post_reblogs CASCADE;
DROP TYPE IF EXISTS hivemind_app.bridge_api_post_reblogs CASCADE;
-- extension of bridge_api_post (same fields/types/order plus extras at the end)
CREATE TYPE bridge_api_post_reblogs AS (
CREATE TYPE hivemind_app.bridge_api_post_reblogs AS (
id INTEGER,
author VARCHAR,
parent_author VARCHAR,
@ -83,9 +83,9 @@ CREATE TYPE bridge_api_post_reblogs AS (
reblogged_by VARCHAR[]
);
DROP TYPE IF EXISTS bridge_api_post_discussion CASCADE;
DROP TYPE IF EXISTS hivemind_app.bridge_api_post_discussion CASCADE;
-- extension of bridge_api_post (same fields/types/order plus extras at the end)
CREATE TYPE bridge_api_post_discussion AS (
CREATE TYPE hivemind_app.bridge_api_post_discussion AS (
id INTEGER,
author VARCHAR,
parent_author VARCHAR,

View File

@ -1,21 +1,21 @@
DROP FUNCTION IF EXISTS bridge_get_relationship_between_accounts;
DROP FUNCTION IF EXISTS hivemind_app.bridge_get_relationship_between_accounts;
CREATE FUNCTION bridge_get_relationship_between_accounts( in _account1 VARCHAR, in _account2 VARCHAR,
out state hive_follows.state%TYPE,
out blacklisted hive_follows.blacklisted%TYPE,
out follow_blacklists hive_follows.follow_blacklists%TYPE,
out follow_muted hive_follows.follow_muted%TYPE,
out id hive_follows.id%TYPE,
out created_at hive_follows.created_at%TYPE,
out block_num hive_follows.block_num%TYPE)
CREATE FUNCTION hivemind_app.bridge_get_relationship_between_accounts( in _account1 VARCHAR, in _account2 VARCHAR,
out state hivemind_app.hive_follows.state%TYPE,
out blacklisted hivemind_app.hive_follows.blacklisted%TYPE,
out follow_blacklists hivemind_app.hive_follows.follow_blacklists%TYPE,
out follow_muted hivemind_app.hive_follows.follow_muted%TYPE,
out id hivemind_app.hive_follows.id%TYPE,
out created_at hivemind_app.hive_follows.created_at%TYPE,
out block_num hivemind_app.hive_follows.block_num%TYPE)
AS
$function$
DECLARE
__account1_id INT;
__account2_id INT;
BEGIN
__account1_id = find_account_id( _account1, True );
__account2_id = find_account_id( _account2, True );
__account1_id = hivemind_app.find_account_id( _account1, True );
__account2_id = hivemind_app.find_account_id( _account2, True );
SELECT
hf.state,
hf.blacklisted,
@ -33,7 +33,7 @@ BEGIN
created_at,
block_num
FROM
hive_follows hf
hivemind_app.hive_follows hf
WHERE
hf.follower = __account1_id AND hf.following = __account2_id;
END
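-- Note that the OUT parameters above are declared with %TYPE, so their types track
-- the hivemind_app.hive_follows columns automatically if the table ever changes.
-- Usage sketch (account names are hypothetical):
SELECT * FROM hivemind_app.bridge_get_relationship_between_accounts('alice', 'bob');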

View File

@ -1,29 +1,29 @@
DROP TYPE IF EXISTS bridge_api_list_all_subscriptions CASCADE;
CREATE TYPE bridge_api_list_all_subscriptions AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_list_all_subscriptions CASCADE;
CREATE TYPE hivemind_app.bridge_api_list_all_subscriptions AS (
name VARCHAR,
title VARCHAR,
role VARCHAR,
role_title VARCHAR
);
DROP FUNCTION IF EXISTS bridge_list_all_subscriptions
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_all_subscriptions
;
CREATE OR REPLACE FUNCTION bridge_list_all_subscriptions(
in _account hive_accounts.name%TYPE
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_all_subscriptions(
in _account hivemind_app.hive_accounts.name%TYPE
)
RETURNS SETOF bridge_api_list_all_subscriptions
RETURNS SETOF hivemind_app.bridge_api_list_all_subscriptions
LANGUAGE plpgsql
AS
$function$
DECLARE
__account_id INT := find_account_id( _account, True );
__account_id INT := hivemind_app.find_account_id( _account, True );
BEGIN
RETURN QUERY
SELECT c.name, c.title, get_role_name(COALESCE(r.role_id, 0)), COALESCE(r.title, '')
FROM hive_communities c
JOIN hive_subscriptions s ON c.id = s.community_id
LEFT JOIN hive_roles r ON r.account_id = s.account_id
SELECT c.name, c.title, hivemind_app.get_role_name(COALESCE(r.role_id, 0)), COALESCE(r.title, '')
FROM hivemind_app.hive_communities c
JOIN hivemind_app.hive_subscriptions s ON c.id = s.community_id
LEFT JOIN hivemind_app.hive_roles r ON r.account_id = s.account_id
AND r.community_id = c.id
WHERE s.account_id = __account_id
ORDER BY COALESCE(role_id, 0) DESC, c.rank;
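-- Usage sketch (account name is hypothetical): lists every community the account
-- subscribes to, highest role first, then by community rank.
SELECT * FROM hivemind_app.bridge_list_all_subscriptions('alice');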

View File

@ -1,5 +1,5 @@
DROP TYPE IF EXISTS bridge_api_list_communities CASCADE;
CREATE TYPE bridge_api_list_communities AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_list_communities CASCADE;
CREATE TYPE hivemind_app.bridge_api_list_communities AS (
id INTEGER,
name VARCHAR(16),
title VARCHAR(32),
@ -17,24 +17,24 @@ CREATE TYPE bridge_api_list_communities AS (
admins VARCHAR ARRAY
);
DROP FUNCTION IF EXISTS bridge_list_communities_by_rank
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_communities_by_rank
;
CREATE OR REPLACE FUNCTION bridge_list_communities_by_rank(
in _observer hive_accounts.name%TYPE,
in _last hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_communities_by_rank(
in _observer hivemind_app.hive_accounts.name%TYPE,
in _last hivemind_app.hive_accounts.name%TYPE,
in _search VARCHAR,
in _limit INT
)
RETURNS SETOF bridge_api_list_communities
RETURNS SETOF hivemind_app.bridge_api_list_communities
LANGUAGE plpgsql
AS
$function$
DECLARE
__last_id INT := find_community_id( _last, True );
__rank hive_communities.rank%TYPE = 0;
__last_id INT := hivemind_app.find_community_id( _last, True );
__rank hivemind_app.hive_communities.rank%TYPE = 0;
BEGIN
IF ( _last <> '' ) THEN
SELECT hc.rank INTO __rank FROM hive_communities hc WHERE hc.id = __last_id;
SELECT hc.rank INTO __rank FROM hivemind_app.hive_communities hc WHERE hc.id = __last_id;
END IF;
RETURN QUERY SELECT
hc.id,
@ -50,11 +50,11 @@ BEGIN
hc.num_authors,
hc.created_at::VARCHAR(19),
hc.avatar_url,
bridge_get_community_context(_observer, hc.name),
hivemind_app.bridge_get_community_context(_observer, hc.name),
array_agg(ha.name ORDER BY ha.name)
FROM hive_communities as hc
LEFT JOIN hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hive_accounts ha ON hr.account_id = ha.id
FROM hivemind_app.hive_communities as hc
LEFT JOIN hivemind_app.hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hivemind_app.hive_accounts ha ON hr.account_id = ha.id
WHERE hc.rank > __rank
AND (_search IS NULL OR to_tsvector('english', hc.title || ' ' || hc.about) @@ plainto_tsquery(_search))
GROUP BY hc.id
@ -65,20 +65,20 @@ END
$function$
;
DROP FUNCTION IF EXISTS bridge_list_communities_by_new
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_communities_by_new
;
CREATE OR REPLACE FUNCTION bridge_list_communities_by_new(
in _observer hive_accounts.name%TYPE,
in _last hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_communities_by_new(
in _observer hivemind_app.hive_accounts.name%TYPE,
in _last hivemind_app.hive_accounts.name%TYPE,
in _search VARCHAR,
in _limit INT
)
RETURNS SETOF bridge_api_list_communities
RETURNS SETOF hivemind_app.bridge_api_list_communities
LANGUAGE plpgsql
AS
$function$
DECLARE
__last_id INT := find_community_id( _last, True );
__last_id INT := hivemind_app.find_community_id( _last, True );
BEGIN
RETURN QUERY SELECT
hc.id,
@ -94,11 +94,11 @@ BEGIN
hc.num_authors,
hc.created_at::VARCHAR(19),
hc.avatar_url,
bridge_get_community_context(_observer, hc.name),
hivemind_app.bridge_get_community_context(_observer, hc.name),
array_agg(ha.name ORDER BY ha.name)
FROM hive_communities as hc
LEFT JOIN hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hive_accounts ha ON hr.account_id = ha.id
FROM hivemind_app.hive_communities as hc
LEFT JOIN hivemind_app.hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hivemind_app.hive_accounts ha ON hr.account_id = ha.id
WHERE (__last_id = 0 OR hc.id < __last_id)
AND (_search IS NULL OR to_tsvector('english', hc.title || ' ' || hc.about) @@ plainto_tsquery(_search))
GROUP BY hc.id
@ -109,24 +109,24 @@ END
$function$
;
DROP FUNCTION IF EXISTS bridge_list_communities_by_subs
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_communities_by_subs
;
CREATE OR REPLACE FUNCTION bridge_list_communities_by_subs(
in _observer hive_accounts.name%TYPE,
in _last hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_communities_by_subs(
in _observer hivemind_app.hive_accounts.name%TYPE,
in _last hivemind_app.hive_accounts.name%TYPE,
in _search VARCHAR,
in _limit INT
)
RETURNS SETOF bridge_api_list_communities
RETURNS SETOF hivemind_app.bridge_api_list_communities
LANGUAGE plpgsql
AS
$function$
DECLARE
__last_id INT := find_community_id( _last, True );
__subscribers hive_communities.subscribers%TYPE;
__last_id INT := hivemind_app.find_community_id( _last, True );
__subscribers hivemind_app.hive_communities.subscribers%TYPE;
BEGIN
IF ( _last <> '' ) THEN
SELECT hc.subscribers INTO __subscribers FROM hive_communities hc WHERE hc.id = __last_id;
SELECT hc.subscribers INTO __subscribers FROM hivemind_app.hive_communities hc WHERE hc.id = __last_id;
END IF;
RETURN QUERY SELECT
hc.id,
@ -142,11 +142,11 @@ BEGIN
hc.num_authors,
hc.created_at::VARCHAR(19),
hc.avatar_url,
bridge_get_community_context(_observer, hc.name),
hivemind_app.bridge_get_community_context(_observer, hc.name),
array_agg(ha.name ORDER BY ha.name)
FROM hive_communities as hc
LEFT JOIN hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hive_accounts ha ON hr.account_id = ha.id
FROM hivemind_app.hive_communities as hc
LEFT JOIN hivemind_app.hive_roles hr ON hr.community_id = hc.id AND hr.role_id = 6
LEFT JOIN hivemind_app.hive_accounts ha ON hr.account_id = ha.id
WHERE (__last_id = 0 OR hc.subscribers < __subscribers OR (hc.subscribers = __subscribers AND hc.id < __last_id))
AND (_search IS NULL OR to_tsvector('english', hc.title || ' ' || hc.about) @@ plainto_tsquery(_search))
GROUP BY hc.id

View File

@ -1,42 +1,42 @@
DROP TYPE IF EXISTS bridge_api_list_community_roles CASCADE;
CREATE TYPE bridge_api_list_community_roles AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_list_community_roles CASCADE;
CREATE TYPE hivemind_app.bridge_api_list_community_roles AS (
name VARCHAR(16),
role VARCHAR,
title VARCHAR
);
DROP TYPE IF EXISTS bridge_api_community_team CASCADE;
CREATE TYPE bridge_api_community_team AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_community_team CASCADE;
CREATE TYPE hivemind_app.bridge_api_community_team AS (
name VARCHAR,
role_id SMALLINT,
title VARCHAR
);
DROP FUNCTION IF EXISTS bridge_list_community_roles
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_community_roles
;
CREATE OR REPLACE FUNCTION bridge_list_community_roles(
in _community hive_communities.name%TYPE,
in _last hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_community_roles(
in _community hivemind_app.hive_communities.name%TYPE,
in _last hivemind_app.hive_accounts.name%TYPE,
in _limit INT
)
RETURNS SETOF bridge_api_list_community_roles
RETURNS SETOF hivemind_app.bridge_api_list_community_roles
LANGUAGE plpgsql
AS
$function$
DECLARE
__last_role INT;
__community_id INT := find_community_id( _community, True );
__context bridge_api_community_context;
__team bridge_api_community_team;
__community_id INT := hivemind_app.find_community_id( _community, True );
__context hivemind_app.bridge_api_community_context;
__team hivemind_app.bridge_api_community_team;
BEGIN
IF _last <> '' THEN
SELECT INTO __last_role
COALESCE((
SELECT role_id
FROM hive_roles
WHERE account_id = (SELECT id from hive_accounts WHERE name = _last)
AND hive_roles.community_id = __community_id
FROM hivemind_app.hive_roles
WHERE account_id = (SELECT id from hivemind_app.hive_accounts WHERE name = _last)
AND hivemind_app.hive_roles.community_id = __community_id
),0);
IF __last_role = 0 THEN
@ -44,18 +44,18 @@ BEGIN
END IF;
RETURN QUERY
SELECT a.name, get_role_name(r.role_id), r.title
FROM hive_roles r
JOIN hive_accounts a ON r.account_id = a.id
SELECT a.name, hivemind_app.get_role_name(r.role_id), r.title
FROM hivemind_app.hive_roles r
JOIN hivemind_app.hive_accounts a ON r.account_id = a.id
WHERE r.community_id = __community_id
AND r.role_id != 0
AND (r.role_id < __last_role OR (r.role_id = __last_role AND a.name > _last))
ORDER BY r.role_id DESC, name LIMIT _limit;
ELSE
RETURN QUERY
SELECT a.name, get_role_name(r.role_id), r.title
FROM hive_roles r
JOIN hive_accounts a ON r.account_id = a.id
SELECT a.name, hivemind_app.get_role_name(r.role_id), r.title
FROM hivemind_app.hive_roles r
JOIN hivemind_app.hive_accounts a ON r.account_id = a.id
WHERE r.community_id = __community_id
AND r.role_id != 0
ORDER BY r.role_id DESC, name LIMIT _limit;

View File

@ -1,26 +1,26 @@
DROP TYPE IF EXISTS bridge_api_list_pop_communities CASCADE;
CREATE TYPE bridge_api_list_pop_communities AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_list_pop_communities CASCADE;
CREATE TYPE hivemind_app.bridge_api_list_pop_communities AS (
name VARCHAR,
title VARCHAR
);
DROP FUNCTION IF EXISTS bridge_list_pop_communities
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_pop_communities
;
CREATE OR REPLACE FUNCTION bridge_list_pop_communities(
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_pop_communities(
in _limit INT
)
RETURNS SETOF bridge_api_list_pop_communities
RETURNS SETOF hivemind_app.bridge_api_list_pop_communities
LANGUAGE plpgsql
AS
$function$
BEGIN
RETURN QUERY
SELECT name, title
FROM hive_communities
FROM hivemind_app.hive_communities
JOIN (
SELECT community_id, COUNT(*) newsubs
FROM hive_subscriptions
WHERE created_at > head_block_time() - INTERVAL '1 MONTH'
FROM hivemind_app.hive_subscriptions
WHERE created_at > hivemind_app.head_block_time() - INTERVAL '1 MONTH'
GROUP BY community_id
) stats
ON stats.community_id = id
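-- Usage sketch: "popular" here means the most new subscriptions within one month
-- of head-block time; a call for the top ten communities:
SELECT * FROM hivemind_app.bridge_list_pop_communities(10);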

View File

@ -1,32 +1,32 @@
DROP TYPE IF EXISTS bridge_api_list_subscribers CASCADE;
CREATE TYPE bridge_api_list_subscribers AS (
DROP TYPE IF EXISTS hivemind_app.bridge_api_list_subscribers CASCADE;
CREATE TYPE hivemind_app.bridge_api_list_subscribers AS (
name VARCHAR,
role VARCHAR,
title VARCHAR,
created_at TIMESTAMP WITHOUT TIME ZONE
);
DROP FUNCTION IF EXISTS bridge_list_subscribers
DROP FUNCTION IF EXISTS hivemind_app.bridge_list_subscribers
;
CREATE OR REPLACE FUNCTION bridge_list_subscribers(
in _community hive_communities.name%TYPE,
in _last hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.bridge_list_subscribers(
in _community hivemind_app.hive_communities.name%TYPE,
in _last hivemind_app.hive_accounts.name%TYPE,
in _limit INT
)
RETURNS SETOF bridge_api_list_subscribers
RETURNS SETOF hivemind_app.bridge_api_list_subscribers
LANGUAGE plpgsql
AS
$function$
DECLARE
__community_id INT := find_community_id( _community, True );
__last_id INT := find_subscription_id(_last, _community, True);
__community_id INT := hivemind_app.find_community_id( _community, True );
__last_id INT := hivemind_app.find_subscription_id(_last, _community, True);
BEGIN
RETURN QUERY
SELECT ha.name, get_role_name(COALESCE(hr.role_id,0)), hr.title, hs.created_at
FROM hive_subscriptions hs
LEFT JOIN hive_roles hr ON hs.account_id = hr.account_id
SELECT ha.name, hivemind_app.get_role_name(COALESCE(hr.role_id,0)), hr.title, hs.created_at
FROM hivemind_app.hive_subscriptions hs
LEFT JOIN hivemind_app.hive_roles hr ON hs.account_id = hr.account_id
AND hs.community_id = hr.community_id
JOIN hive_accounts ha ON hs.account_id = ha.id
JOIN hivemind_app.hive_accounts ha ON hs.account_id = ha.id
WHERE hs.community_id = __community_id
AND (__last_id = 0 OR (
hs.id < __last_id

View File

@ -1,21 +1,21 @@
DROP TYPE IF EXISTS AccountReputation CASCADE;
DROP TYPE IF EXISTS hivemind_app.AccountReputation CASCADE;
CREATE TYPE AccountReputation AS (id int, reputation bigint, is_implicit boolean, changed boolean);
CREATE TYPE hivemind_app.AccountReputation AS (id int, reputation bigint, is_implicit boolean, changed boolean);
DROP FUNCTION IF EXISTS calculate_account_reputations;
DROP FUNCTION IF EXISTS hivemind_app.calculate_account_reputations;
--- Massive version of account reputation calculation.
CREATE OR REPLACE FUNCTION calculate_account_reputations(
CREATE OR REPLACE FUNCTION hivemind_app.calculate_account_reputations(
_first_block_num integer,
_last_block_num integer,
_tracked_account character varying DEFAULT NULL::character varying)
RETURNS SETOF accountreputation
RETURNS SETOF hivemind_app.accountreputation
LANGUAGE 'plpgsql'
STABLE
AS $BODY$
DECLARE
__vote_data RECORD;
__account_reputations AccountReputation[];
__account_reputations hivemind_app.AccountReputation[];
__author_rep bigint;
__new_author_rep bigint;
__voter_rep bigint;
@ -28,21 +28,21 @@ DECLARE
__traced_author int;
__account_name varchar;
BEGIN
SELECT INTO __account_reputations ARRAY(SELECT ROW(a.id, a.reputation, a.is_implicit, false)::AccountReputation
FROM hive_accounts a
SELECT INTO __account_reputations ARRAY(SELECT ROW(a.id, a.reputation, a.is_implicit, false)::hivemind_app.AccountReputation
FROM hivemind_app.hive_accounts a
WHERE a.id != 0
ORDER BY a.id);
-- SELECT COALESCE((SELECT ha.id FROM hive_accounts ha WHERE ha.name = _tracked_account), 0) INTO __traced_author;
-- SELECT COALESCE((SELECT ha.id FROM hivemind_app.hive_accounts ha WHERE ha.name = _tracked_account), 0) INTO __traced_author;
FOR __vote_data IN
SELECT rd.id, rd.author_id, rd.voter_id, rd.rshares,
COALESCE((SELECT prd.rshares
FROM hive_reputation_data prd
FROM hivemind_app.hive_reputation_data prd
WHERE prd.author_id = rd.author_id and prd.voter_id = rd.voter_id
and prd.permlink = rd.permlink and prd.id < rd.id
ORDER BY prd.id DESC LIMIT 1), 0) as prev_rshares
FROM hive_reputation_data rd
FROM hivemind_app.hive_reputation_data rd
WHERE (_first_block_num IS NULL AND _last_block_num IS NULL) OR (rd.block_num BETWEEN _first_block_num AND _last_block_num)
ORDER BY rd.id
LOOP
@ -51,7 +51,7 @@ BEGIN
/* IF __vote_data.author_id = __traced_author THEN
raise notice 'Processing vote <%> rshares: %, prev_rshares: %', __vote_data.id, __vote_data.rshares, __vote_data.prev_rshares;
select ha.name into __account_name from hive_accounts ha where ha.id = __vote_data.voter_id;
select ha.name into __account_name from hivemind_app.hive_accounts ha where ha.id = __vote_data.voter_id;
raise notice 'Voter `%` (%) reputation: %', __account_name, __vote_data.voter_id, __voter_rep;
END IF;
*/
@ -70,7 +70,7 @@ BEGIN
(__prev_rshares < 0 AND NOT __implicit_voter_rep AND __voter_rep > __author_rep - __prev_rep_delta)) THEN
__author_rep := __author_rep - __prev_rep_delta;
__implicit_author_rep := __author_rep = 0;
__account_reputations[__vote_data.author_id] := ROW(__vote_data.author_id, __author_rep, __implicit_author_rep, true)::AccountReputation;
__account_reputations[__vote_data.author_id] := ROW(__vote_data.author_id, __author_rep, __implicit_author_rep, true)::hivemind_app.AccountReputation;
/* IF __vote_data.author_id = __traced_author THEN
raise notice 'Corrected author_rep by prev_rep_delta: % to have reputation: %', __prev_rep_delta, __author_rep;
END IF;
@ -86,7 +86,7 @@ BEGIN
__rep_delta := (__rshares >> 6)::bigint;
__new_author_rep = __author_rep + __rep_delta;
__account_reputations[__vote_data.author_id] := ROW(__vote_data.author_id, __new_author_rep, False, true)::AccountReputation;
__account_reputations[__vote_data.author_id] := ROW(__vote_data.author_id, __new_author_rep, False, true)::hivemind_app.AccountReputation;
/* IF __vote_data.author_id = __traced_author THEN
raise notice 'Changing account: <%> reputation from % to %', __vote_data.author_id, __author_rep, __new_author_rep;
END IF;
@ -108,11 +108,11 @@ END
$BODY$
;
DROP FUNCTION IF EXISTS calculate_account_reputations_for_block;
DROP FUNCTION IF EXISTS hivemind_app.calculate_account_reputations_for_block;
DROP TABLE IF EXISTS __new_reputation_data;
DROP TABLE IF EXISTS hivemind_app.__new_reputation_data;
CREATE UNLOGGED TABLE IF NOT EXISTS __new_reputation_data
CREATE UNLOGGED TABLE IF NOT EXISTS hivemind_app.__new_reputation_data
(
id integer,
author_id integer,
@ -121,9 +121,9 @@ CREATE UNLOGGED TABLE IF NOT EXISTS __new_reputation_data
prev_rshares bigint
);
DROP TABLE IF EXISTS __tmp_accounts;
DROP TABLE IF EXISTS hivemind_app.__tmp_accounts;
CREATE UNLOGGED TABLE IF NOT EXISTS __tmp_accounts
CREATE UNLOGGED TABLE IF NOT EXISTS hivemind_app.__tmp_accounts
(
id integer,
reputation bigint,
@ -131,8 +131,8 @@ CREATE UNLOGGED TABLE IF NOT EXISTS __tmp_accounts
changed boolean
);
CREATE OR REPLACE FUNCTION calculate_account_reputations_for_block(_block_num INT, _tracked_account VARCHAR DEFAULT NULL::VARCHAR)
RETURNS SETOF accountreputation
CREATE OR REPLACE FUNCTION hivemind_app.calculate_account_reputations_for_block(_block_num INT, _tracked_account VARCHAR DEFAULT NULL::VARCHAR)
RETURNS SETOF hivemind_app.accountreputation
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
@ -152,48 +152,48 @@ DECLARE
__account_name varchar;
BEGIN
DELETE FROM __new_reputation_data;
DELETE FROM hivemind_app.__new_reputation_data;
INSERT INTO __new_reputation_data
INSERT INTO hivemind_app.__new_reputation_data
SELECT rd.id, rd.author_id, rd.voter_id, rd.rshares,
COALESCE((SELECT prd.rshares
FROM hive_reputation_data prd
FROM hivemind_app.hive_reputation_data prd
WHERE prd.author_id = rd.author_id AND prd.voter_id = rd.voter_id
AND prd.permlink = rd.permlink AND prd.id < rd.id
ORDER BY prd.id DESC LIMIT 1), 0) AS prev_rshares
FROM hive_reputation_data rd
FROM hivemind_app.hive_reputation_data rd
WHERE rd.block_num = _block_num
ORDER BY rd.id
;
DELETE FROM __tmp_accounts;
DELETE FROM hivemind_app.__tmp_accounts;
INSERT INTO __tmp_accounts
INSERT INTO hivemind_app.__tmp_accounts
SELECT ha.id, ha.reputation, ha.is_implicit, false AS changed
FROM __new_reputation_data rd
JOIN hive_accounts ha on rd.author_id = ha.id
FROM hivemind_app.__new_reputation_data rd
JOIN hivemind_app.hive_accounts ha on rd.author_id = ha.id
UNION
SELECT hv.id, hv.reputation, hv.is_implicit, false as changed
FROM __new_reputation_data rd
JOIN hive_accounts hv on rd.voter_id = hv.id
FROM hivemind_app.__new_reputation_data rd
JOIN hivemind_app.hive_accounts hv on rd.voter_id = hv.id
;
-- SELECT COALESCE((SELECT ha.id FROM hive_accounts ha WHERE ha.name = _tracked_account), 0) INTO __traced_author;
-- SELECT COALESCE((SELECT ha.id FROM hivemind_app.hive_accounts ha WHERE ha.name = _tracked_account), 0) INTO __traced_author;
FOR __vote_data IN
SELECT rd.id, rd.author_id, rd.voter_id, rd.rshares, rd.prev_rshares
FROM __new_reputation_data rd
FROM hivemind_app.__new_reputation_data rd
ORDER BY rd.id
LOOP
SELECT INTO __voter_rep, __implicit_voter_rep ha.reputation, ha.is_implicit
FROM __tmp_accounts ha where ha.id = __vote_data.voter_id;
FROM hivemind_app.__tmp_accounts ha where ha.id = __vote_data.voter_id;
SELECT INTO __author_rep, __implicit_author_rep ha.reputation, ha.is_implicit
FROM __tmp_accounts ha where ha.id = __vote_data.author_id;
FROM hivemind_app.__tmp_accounts ha where ha.id = __vote_data.author_id;
/* IF __vote_data.author_id = __traced_author THEN
raise notice 'Processing vote <%> rshares: %, prev_rshares: %', __vote_data.id, __vote_data.rshares, __vote_data.prev_rshares;
select ha.name into __account_name from hive_accounts ha where ha.id = __vote_data.voter_id;
select ha.name into __account_name from hivemind_app.hive_accounts ha where ha.id = __vote_data.voter_id;
raise notice 'Voter `%` (%) reputation: %', __account_name, __vote_data.voter_id, __voter_rep;
END IF;
*/
@ -228,7 +228,7 @@ BEGIN
__new_author_rep = __author_rep + __rep_delta;
__author_rep_changed = true;
UPDATE __tmp_accounts
UPDATE hivemind_app.__tmp_accounts
SET reputation = __new_author_rep,
is_implicit = False,
changed = true
@ -247,16 +247,16 @@ BEGIN
END LOOP;
RETURN QUERY SELECT id, reputation, is_implicit, Changed
FROM __tmp_accounts
FROM hivemind_app.__tmp_accounts
WHERE Reputation IS NOT NULL AND Changed
;
END
$BODY$
;
DROP FUNCTION IF EXISTS truncate_account_reputation_data;
DROP FUNCTION IF EXISTS hivemind_app.truncate_account_reputation_data;
CREATE OR REPLACE FUNCTION truncate_account_reputation_data(
CREATE OR REPLACE FUNCTION hivemind_app.truncate_account_reputation_data(
in _day_limit INTERVAL,
in _allow_truncate BOOLEAN)
RETURNS VOID
@ -267,23 +267,23 @@ DECLARE
__block_num_limit INT;
BEGIN
__block_num_limit = block_before_head(_day_limit);
__block_num_limit = hivemind_app.block_before_head(_day_limit);
IF _allow_truncate THEN
DROP TABLE IF EXISTS __actual_reputation_data;
CREATE UNLOGGED TABLE IF NOT EXISTS __actual_reputation_data
DROP TABLE IF EXISTS hivemind_app.__actual_reputation_data;
CREATE UNLOGGED TABLE IF NOT EXISTS hivemind_app.__actual_reputation_data
AS
SELECT * FROM hive_reputation_data hrd
SELECT * FROM hivemind_app.hive_reputation_data hrd
WHERE hrd.block_num >= __block_num_limit;
TRUNCATE TABLE hive_reputation_data;
INSERT INTO hive_reputation_data
SELECT * FROM __actual_reputation_data;
TRUNCATE TABLE hivemind_app.hive_reputation_data;
INSERT INTO hivemind_app.hive_reputation_data
SELECT * FROM hivemind_app.__actual_reputation_data;
TRUNCATE TABLE __actual_reputation_data;
DROP TABLE IF EXISTS __actual_reputation_data;
TRUNCATE TABLE hivemind_app.__actual_reputation_data;
DROP TABLE IF EXISTS hivemind_app.__actual_reputation_data;
ELSE
DELETE FROM hive_reputation_data hpd
DELETE FROM hivemind_app.hive_reputation_data hpd
WHERE hpd.block_num < __block_num_limit
;
END IF;
@ -292,9 +292,9 @@ $BODY$
;
DROP FUNCTION IF EXISTS update_account_reputations;
DROP FUNCTION IF EXISTS hivemind_app.update_account_reputations;
CREATE OR REPLACE FUNCTION update_account_reputations(
CREATE OR REPLACE FUNCTION hivemind_app.update_account_reputations(
in _first_block_num INTEGER,
in _last_block_num INTEGER,
in _force_data_truncate BOOLEAN)
@ -307,19 +307,19 @@ DECLARE
__truncate_block_count INT := 1*24*1200*3; --- 1day
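-- NOTE: assuming the standard 3-second block interval, one day is 24*1200 = 28,800
-- blocks, while 1*24*1200*3 = 86,400 blocks (about three days), so the "1day"
-- label above may be stale.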
BEGIN
UPDATE hive_accounts urs
UPDATE hivemind_app.hive_accounts urs
SET reputation = ds.reputation,
is_implicit = ds.is_implicit
FROM
(
SELECT p.id as account_id, p.reputation, p.is_implicit
FROM calculate_account_reputations(_first_block_num, _last_block_num) p
FROM hivemind_app.calculate_account_reputations(_first_block_num, _last_block_num) p
WHERE _first_block_num IS NULL OR _last_block_num IS NULL OR _first_block_num != _last_block_num
UNION ALL
SELECT p.id as account_id, p.reputation, p.is_implicit
FROM calculate_account_reputations_for_block(_first_block_num) p
FROM hivemind_app.calculate_account_reputations_for_block(_first_block_num) p
WHERE _first_block_num IS NOT NULL AND _last_block_num IS NOT NULL AND _first_block_num = _last_block_num
) ds
@ -327,7 +327,7 @@ BEGIN
;
IF _force_data_truncate or _last_block_num IS NULL OR MOD(_last_block_num, __truncate_block_count) = 0 THEN
PERFORM truncate_account_reputation_data(__truncate_interval, _force_data_truncate);
PERFORM hivemind_app.truncate_account_reputation_data(__truncate_interval, _force_data_truncate);
END IF
;
END

View File

@ -1,6 +1,6 @@
DROP TYPE IF EXISTS condenser_api_post_ex CASCADE;
DROP TYPE IF EXISTS hivemind_app.condenser_api_post_ex CASCADE;
-- type for fat node style post of get_content()
CREATE TYPE condenser_api_post_ex AS (
CREATE TYPE hivemind_app.condenser_api_post_ex AS (
id INT,
author VARCHAR(16),
permlink VARCHAR(255),

View File

@ -1,6 +1,6 @@
DROP TYPE IF EXISTS condenser_api_post CASCADE;
DROP TYPE IF EXISTS hivemind_app.condenser_api_post CASCADE;
-- type for regular condenser_api posts
CREATE TYPE condenser_api_post AS (
CREATE TYPE hivemind_app.condenser_api_post AS (
id INT,
entry_id INT, -- used for paging with offset (otherwise can be any value)
author VARCHAR(16),
@ -18,7 +18,7 @@ CREATE TYPE condenser_api_post AS (
children INT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
reblogged_at TIMESTAMP, -- used when post data is combined with hive_feed_cache (otherwise can be any date)
reblogged_at TIMESTAMP, -- used when post data is combined with hivemind_app.hive_feed_cache (otherwise can be any date)
rshares NUMERIC,
json TEXT,
parent_author VARCHAR(16),

View File

@ -1,39 +1,39 @@
DROP FUNCTION IF EXISTS condenser_get_follow_count;
CREATE FUNCTION condenser_get_follow_count( in _account VARCHAR,
out following hive_accounts.following%TYPE, out followers hive_accounts.followers%TYPE )
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_follow_count;
CREATE FUNCTION hivemind_app.condenser_get_follow_count( in _account VARCHAR,
out following hivemind_app.hive_accounts.following%TYPE, out followers hivemind_app.hive_accounts.followers%TYPE )
AS
$function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _account, True );
SELECT ha.following, ha.followers INTO following, followers FROM hive_accounts ha WHERE ha.id = __account_id;
-- following equals (SELECT COUNT(*) FROM hive_follows WHERE state = 1 AND following = __account_id)
-- followers equals (SELECT COUNT(*) FROM hive_follows WHERE state = 1 AND follower = __account_id)
__account_id = hivemind_app.find_account_id( _account, True );
SELECT ha.following, ha.followers INTO following, followers FROM hivemind_app.hive_accounts ha WHERE ha.id = __account_id;
-- following equals (SELECT COUNT(*) FROM hivemind_app.hive_follows WHERE state = 1 AND following = __account_id)
-- followers equals (SELECT COUNT(*) FROM hivemind_app.hive_follows WHERE state = 1 AND follower = __account_id)
END
$function$
language plpgsql STABLE;
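-- Usage sketch (account name is hypothetical): the OUT parameters surface as
-- result columns, so the call reads like a one-row table:
SELECT following, followers FROM hivemind_app.condenser_get_follow_count('alice');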
DROP FUNCTION IF EXISTS condenser_get_followers;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_followers;
-- list of account names that follow/ignore given account
CREATE FUNCTION condenser_get_followers( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
RETURNS SETOF hive_accounts.name%TYPE
CREATE FUNCTION hivemind_app.condenser_get_followers( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
RETURNS SETOF hivemind_app.hive_accounts.name%TYPE
AS
$function$
DECLARE
__account_id INT;
__start_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__start_id = find_account_id( _start, True );
__account_id = hivemind_app.find_account_id( _account, True );
__start_id = hivemind_app.find_account_id( _start, True );
IF __start_id <> 0 THEN
SELECT INTO __start_id ( SELECT id FROM hive_follows WHERE following = __account_id AND follower = __start_id );
SELECT INTO __start_id ( SELECT id FROM hivemind_app.hive_follows WHERE following = __account_id AND follower = __start_id );
END IF;
RETURN QUERY SELECT
ha.name
FROM
hive_follows hf
JOIN hive_accounts ha ON hf.follower = ha.id
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_accounts ha ON hf.follower = ha.id
WHERE
hf.following = __account_id AND hf.state = _type AND ( __start_id = 0 OR hf.id < __start_id )
ORDER BY hf.id DESC
@ -42,27 +42,27 @@ END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS condenser_get_following;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_following;
-- list of account names followed/ignored by given account
CREATE FUNCTION condenser_get_following( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
RETURNS SETOF hive_accounts.name%TYPE
CREATE FUNCTION hivemind_app.condenser_get_following( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
RETURNS SETOF hivemind_app.hive_accounts.name%TYPE
AS
$function$
DECLARE
__account_id INT;
__start_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__start_id = find_account_id( _start, True );
__account_id = hivemind_app.find_account_id( _account, True );
__start_id = hivemind_app.find_account_id( _start, True );
IF __start_id <> 0 THEN
SELECT INTO __start_id ( SELECT id FROM hive_follows WHERE follower = __account_id AND following = __start_id );
SELECT INTO __start_id ( SELECT id FROM hivemind_app.hive_follows WHERE follower = __account_id AND following = __start_id );
END IF;
RETURN QUERY
WITH following_set AS MATERIALIZED --- condenser_get_following
(
SELECT
hf.id, hf.following
FROM hive_follows hf
FROM hivemind_app.hive_follows hf
WHERE hf.follower = __account_id AND hf.state = _type AND ( __start_id = 0 OR hf.id < __start_id )
ORDER BY hf.id + 1 DESC --- the + 1 is an important hack: it nudges the Postgres planner to use the dedicated index instead of choosing the PK index and performing a linear filter on it
LIMIT _limit
@ -70,7 +70,7 @@ BEGIN
SELECT
ha.name
FROM following_set fs
JOIN hive_accounts ha ON fs.following = ha.id
JOIN hivemind_app.hive_accounts ha ON fs.following = ha.id
ORDER BY fs.id DESC
LIMIT _limit;
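-- A minimal sketch of the "+ 1" planner hint used above (the WHERE values are
-- illustrative): wrapping the sort key in an expression hides its match with the
-- primary key, so the planner picks the narrower dedicated index instead of
-- scanning the PK and filtering linearly.
EXPLAIN
SELECT hf.id
FROM hivemind_app.hive_follows hf
WHERE hf.follower = 42 AND hf.state = 1
ORDER BY hf.id + 1 DESC
LIMIT 10;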

View File

@ -1,13 +1,13 @@
DROP FUNCTION IF EXISTS condenser_get_account_reputations;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_account_reputations;
CREATE OR REPLACE FUNCTION condenser_get_account_reputations(
CREATE OR REPLACE FUNCTION hivemind_app.condenser_get_account_reputations(
in _account_lower_bound VARCHAR,
in _limit INTEGER
)
RETURNS TABLE
(
name hive_accounts.name%TYPE,
reputation hive_accounts.reputation%TYPE
name hivemind_app.hive_accounts.name%TYPE,
reputation hivemind_app.hive_accounts.reputation%TYPE
)
AS
$function$
@ -17,7 +17,7 @@ BEGIN
RETURN QUERY SELECT
ha.name, ha.reputation
FROM hive_accounts ha
FROM hivemind_app.hive_accounts ha
WHERE ha.name >= _account_lower_bound AND ha.id != 0 -- don't include artificial empty account
ORDER BY name
LIMIT _limit;

View File

@ -1,12 +1,12 @@
DROP FUNCTION IF EXISTS condenser_get_blog_helper CASCADE;
CREATE FUNCTION condenser_get_blog_helper( in _blogger VARCHAR, in _last INT, in _limit INT,
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_blog_helper CASCADE;
CREATE FUNCTION hivemind_app.condenser_get_blog_helper( in _blogger VARCHAR, in _last INT, in _limit INT,
out _account_id INT, out _offset INT, out _new_limit INT )
AS
$function$
BEGIN
_account_id = find_account_id( _blogger, True );
_account_id = hivemind_app.find_account_id( _blogger, True );
IF _last < 0 THEN -- caller wants "most recent" page
SELECT INTO _last ( SELECT COUNT(1) - 1 FROM hive_feed_cache hfc WHERE hfc.account_id = _account_id );
SELECT INTO _last ( SELECT COUNT(1) - 1 FROM hivemind_app.hive_feed_cache hfc WHERE hfc.account_id = _account_id );
_offset = _last - _limit + 1;
IF _offset < 0 THEN
_offset = 0;
@ -23,17 +23,17 @@ END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS condenser_get_blog;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_blog;
-- blog posts [ _last - _limit + 1, _last ] oldest first (reverted by caller)
CREATE FUNCTION condenser_get_blog( in _blogger VARCHAR, in _last INT, in _limit INT )
RETURNS SETOF condenser_api_post
CREATE FUNCTION hivemind_app.condenser_get_blog( in _blogger VARCHAR, in _last INT, in _limit INT )
RETURNS SETOF hivemind_app.condenser_api_post
AS
$function$
DECLARE
__account_id INT;
__offset INT;
BEGIN
SELECT h.* INTO __account_id, __offset, _limit FROM condenser_get_blog_helper( _blogger, _last, _limit ) h;
SELECT h.* INTO __account_id, __offset, _limit FROM hivemind_app.condenser_get_blog_helper( _blogger, _last, _limit ) h;
RETURN QUERY SELECT
hp.id,
blog.entry_id::INT,
@ -73,30 +73,30 @@ BEGIN
SELECT
hfc.created_at, hfc.post_id, row_number() over (ORDER BY hfc.created_at ASC, hfc.post_id ASC) - 1 as entry_id
FROM
hive_feed_cache hfc
hivemind_app.hive_feed_cache hfc
WHERE
hfc.account_id = __account_id
ORDER BY hfc.created_at ASC, hfc.post_id ASC
LIMIT _limit
OFFSET __offset
) as blog,
LATERAL get_post_view_by_id(blog.post_id) hp
LATERAL hivemind_app.get_post_view_by_id(blog.post_id) hp
ORDER BY blog.created_at ASC, blog.post_id ASC;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS condenser_get_blog_entries;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_blog_entries;
-- blog entries [ _last - _limit + 1, _last ] oldest first (reverted by caller)
CREATE FUNCTION condenser_get_blog_entries( in _blogger VARCHAR, in _last INT, in _limit INT )
RETURNS TABLE( entry_id INT, author hive_accounts.name%TYPE, permlink hive_permlink_data.permlink%TYPE, reblogged_at TIMESTAMP )
CREATE FUNCTION hivemind_app.condenser_get_blog_entries( in _blogger VARCHAR, in _last INT, in _limit INT )
RETURNS TABLE( entry_id INT, author hivemind_app.hive_accounts.name%TYPE, permlink hivemind_app.hive_permlink_data.permlink%TYPE, reblogged_at TIMESTAMP )
AS
$function$
DECLARE
__account_id INT;
__offset INT;
BEGIN
SELECT h.* INTO __account_id, __offset, _limit FROM condenser_get_blog_helper( _blogger, _last, _limit ) h;
SELECT h.* INTO __account_id, __offset, _limit FROM hivemind_app.condenser_get_blog_helper( _blogger, _last, _limit ) h;
RETURN QUERY SELECT
blog.entry_id::INT,
ha.name as author,
@ -112,16 +112,16 @@ BEGIN
SELECT
hfc.created_at, hfc.post_id, row_number() over (ORDER BY hfc.created_at ASC, hfc.post_id ASC) - 1 as entry_id
FROM
hive_feed_cache hfc
hivemind_app.hive_feed_cache hfc
WHERE
hfc.account_id = __account_id
ORDER BY hfc.created_at ASC, hfc.post_id ASC
LIMIT _limit
OFFSET __offset
) as blog
JOIN hive_posts hp ON hp.id = blog.post_id
JOIN hive_accounts ha ON ha.id = hp.author_id
JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
JOIN hivemind_app.hive_posts hp ON hp.id = blog.post_id
JOIN hivemind_app.hive_accounts ha ON ha.id = hp.author_id
JOIN hivemind_app.hive_permlink_data hpd ON hpd.id = hp.permlink_id
ORDER BY blog.created_at ASC, blog.post_id ASC;
END
$function$
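-- Usage sketch (blogger name is hypothetical): _last = -1 requests the "most
-- recent" page via condenser_get_blog_helper above; rows come back oldest first
-- and are reversed by the caller.
SELECT * FROM hivemind_app.condenser_get_blog_entries('alice', -1, 20);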

View File

@ -1,23 +1,23 @@
DROP FUNCTION IF EXISTS condenser_get_by_account_comments;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_by_account_comments;
CREATE OR REPLACE FUNCTION condenser_get_by_account_comments( in _author VARCHAR, in _permlink VARCHAR, in _limit INTEGER)
RETURNS SETOF bridge_api_post
CREATE OR REPLACE FUNCTION hivemind_app.condenser_get_by_account_comments( in _author VARCHAR, in _permlink VARCHAR, in _limit INTEGER)
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INTEGER := 0;
__author_id INTEGER := 0;
BEGIN
__author_id = find_account_id(_author, True);
__author_id = hivemind_app.find_account_id(_author, True);
IF _permlink <> '' THEN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
END IF;
RETURN QUERY
WITH comments AS MATERIALIZED -- condenser_get_by_account_comments
(
SELECT id
FROM live_comments_view hp
FROM hivemind_app.live_comments_view hp
WHERE hp.author_id = __author_id
AND ( ( __post_id = 0 ) OR ( hp.id <= __post_id ) )
ORDER BY hp.id DESC
@ -63,7 +63,7 @@ BEGIN
hp.is_muted,
NULL
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY hp.id DESC
LIMIT _limit;
END

View File

@ -1,26 +1,26 @@
DROP FUNCTION IF EXISTS condenser_get_by_blog;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_by_blog;
CREATE OR REPLACE FUNCTION condenser_get_by_blog(
CREATE OR REPLACE FUNCTION hivemind_app.condenser_get_by_blog(
in _account VARCHAR,
in _author VARCHAR,
in _permlink VARCHAR,
in _limit INTEGER
)
RETURNS SETOF bridge_api_post
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__post_id INTEGER := 0;
__account_id INTEGER := find_account_id( _account, True );
__account_id INTEGER := hivemind_app.find_account_id( _account, True );
__created_at TIMESTAMP;
BEGIN
IF _permlink <> '' THEN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
__created_at =
(
SELECT created_at
FROM hive_feed_cache
FROM hivemind_app.hive_feed_cache
WHERE account_id = __account_id
AND post_id = __post_id
);
@ -30,8 +30,8 @@ BEGIN
WITH blog_posts AS MATERIALIZED -- condenser_get_by_blog
(
SELECT hp.id
FROM live_posts_comments_view hp
JOIN hive_feed_cache hfc ON hp.id = hfc.post_id
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_feed_cache hfc ON hp.id = hfc.post_id
WHERE hfc.account_id = __account_id
AND ( ( __post_id = 0 ) OR ( hfc.created_at <= __created_at ) )
ORDER BY hp.created_at DESC, hp.id DESC
@ -77,7 +77,7 @@ BEGIN
hp.is_muted,
NULL
FROM blog_posts,
LATERAL get_post_view_by_id(blog_posts.id) hp
LATERAL hivemind_app.get_post_view_by_id(blog_posts.id) hp
ORDER BY hp.created_at DESC, hp.id DESC
LIMIT _limit;

View File

@ -1,21 +1,21 @@
DROP FUNCTION IF EXISTS condenser_get_by_blog_without_reblog;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_by_blog_without_reblog;
CREATE OR REPLACE FUNCTION condenser_get_by_blog_without_reblog( in _author VARCHAR, in _permlink VARCHAR, in _limit INTEGER)
RETURNS SETOF bridge_api_post
CREATE OR REPLACE FUNCTION hivemind_app.condenser_get_by_blog_without_reblog( in _author VARCHAR, in _permlink VARCHAR, in _limit INTEGER)
RETURNS SETOF hivemind_app.bridge_api_post
AS
$function$
DECLARE
__author_id INT;
__post_id INT;
BEGIN
__author_id = find_account_id( _author, True );
__post_id = find_comment_id( _author, _permlink, _permlink <> '' );
__author_id = hivemind_app.find_account_id( _author, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, _permlink <> '' );
RETURN QUERY
WITH blog_posts AS MATERIALIZED -- condenser_get_by_blog_without_reblog
(
SELECT
hp.id
FROM live_posts_view hp
FROM hivemind_app.live_posts_view hp
WHERE hp.author_id = __author_id
AND ((__post_id = 0) OR (hp.id < __post_id))
ORDER BY hp.id DESC
@ -61,7 +61,7 @@ BEGIN
hp.is_muted,
NULL
FROM blog_posts,
LATERAL get_post_view_by_id(blog_posts.id) hp
LATERAL hivemind_app.get_post_view_by_id(blog_posts.id) hp
ORDER BY hp.id DESC
LIMIT _limit;
END

View File

@ -1,12 +1,12 @@
DROP FUNCTION IF EXISTS condenser_get_content;
CREATE FUNCTION condenser_get_content( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF condenser_api_post_ex
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_content;
CREATE FUNCTION hivemind_app.condenser_get_content( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF hivemind_app.condenser_api_post_ex
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY
SELECT
hp.id,
@ -49,25 +49,25 @@ BEGIN
hp.root_title,
hp.active,
hp.author_rewards
FROM get_post_view_by_id(__post_id) hp;
FROM hivemind_app.get_post_view_by_id(__post_id) hp;
END
$function$
language plpgsql STABLE;
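-- Usage sketch (author and permlink are hypothetical): returns the single
-- fat-node style post row; the True flag passed to find_comment_id above
-- presumably enforces that the comment exists.
SELECT * FROM hivemind_app.condenser_get_content('alice', 'my-first-post');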
DROP FUNCTION IF EXISTS condenser_get_content_replies;
CREATE FUNCTION condenser_get_content_replies( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF condenser_api_post_ex
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_content_replies;
CREATE FUNCTION hivemind_app.condenser_get_content_replies( in _author VARCHAR, in _permlink VARCHAR )
RETURNS SETOF hivemind_app.condenser_api_post_ex
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY
WITH replies AS MATERIALIZED -- condenser_get_content_replies
(
SELECT id
FROM live_posts_comments_view hp
FROM hivemind_app.live_posts_comments_view hp
WHERE hp.parent_id = __post_id
ORDER BY hp.id
LIMIT 5000
@ -114,7 +114,7 @@ BEGIN
hp.active,
hp.author_rewards
FROM replies,
LATERAL get_post_view_by_id(replies.id) hp
LATERAL hivemind_app.get_post_view_by_id(replies.id) hp
ORDER BY hp.id;
END
$function$

View File

@ -1,20 +1,20 @@
DROP FUNCTION IF EXISTS condenser_get_names_by_reblogged;
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_names_by_reblogged;
CREATE FUNCTION condenser_get_names_by_reblogged( in _author VARCHAR, in _permlink VARCHAR )
CREATE FUNCTION hivemind_app.condenser_get_names_by_reblogged( in _author VARCHAR, in _permlink VARCHAR )
RETURNS TABLE(
names hive_accounts.name%TYPE
names hivemind_app.hive_accounts.name%TYPE
)
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__post_id = hivemind_app.find_comment_id( _author, _permlink, True );
RETURN QUERY SELECT
ha.name
FROM hive_accounts ha
JOIN hive_feed_cache hfc ON ha.id = hfc.account_id
FROM hivemind_app.hive_accounts ha
JOIN hivemind_app.hive_feed_cache hfc ON ha.id = hfc.account_id
WHERE hfc.post_id = __post_id
ORDER BY ha.name
;

View File

@ -1,5 +1,5 @@
DROP FUNCTION IF EXISTS condenser_get_top_trending_tags_summary;
CREATE FUNCTION condenser_get_top_trending_tags_summary( in _limit INT )
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_top_trending_tags_summary;
CREATE FUNCTION hivemind_app.condenser_get_top_trending_tags_summary( in _limit INT )
RETURNS SETOF VARCHAR
AS
$function$
@ -7,8 +7,8 @@ BEGIN
RETURN QUERY SELECT
hcd.category
FROM
hive_category_data hcd
JOIN hive_posts hp ON hp.category_id = hcd.id
hivemind_app.hive_category_data hcd
JOIN hivemind_app.hive_posts hp ON hp.category_id = hcd.id
WHERE hp.counter_deleted = 0 AND NOT hp.is_paidout
GROUP BY hcd.category
ORDER BY SUM(hp.payout + hp.pending_payout) DESC
@ -17,19 +17,19 @@ END
$function$
language plpgsql STABLE;
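-- Usage sketch: top categories by summed (payout + pending_payout) over live,
-- not-yet-paid-out posts; a call for the ten hottest tags:
SELECT * FROM hivemind_app.condenser_get_top_trending_tags_summary(10);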
DROP FUNCTION IF EXISTS condenser_get_trending_tags;
CREATE FUNCTION condenser_get_trending_tags( in _category VARCHAR, in _limit INT )
RETURNS TABLE( category VARCHAR, total_posts BIGINT, top_posts BIGINT, total_payouts hive_posts.payout%TYPE )
DROP FUNCTION IF EXISTS hivemind_app.condenser_get_trending_tags;
CREATE FUNCTION hivemind_app.condenser_get_trending_tags( in _category VARCHAR, in _limit INT )
RETURNS TABLE( category VARCHAR, total_posts BIGINT, top_posts BIGINT, total_payouts hivemind_app.hive_posts.payout%TYPE )
AS
$function$
DECLARE
__category_id INT;
__payout_limit hive_posts.payout%TYPE;
__payout_limit hivemind_app.hive_posts.payout%TYPE;
BEGIN
__category_id = find_category_id( _category, True );
__category_id = hivemind_app.find_category_id( _category, True );
IF __category_id <> 0 THEN
SELECT SUM(hp.payout + hp.pending_payout) INTO __payout_limit
FROM hive_posts hp
FROM hivemind_app.hive_posts hp
WHERE hp.category_id = __category_id AND hp.counter_deleted = 0 AND NOT hp.is_paidout;
END IF;
RETURN QUERY SELECT
@ -38,8 +38,8 @@ BEGIN
SUM(CASE WHEN hp.depth = 0 THEN 1 ELSE 0 END) AS top_posts,
SUM(hp.payout + hp.pending_payout) AS total_payouts
FROM
hive_posts hp
JOIN hive_category_data hcd ON hcd.id = hp.category_id
hivemind_app.hive_posts hp
JOIN hivemind_app.hive_category_data hcd ON hcd.id = hp.category_id
WHERE NOT hp.is_paidout AND counter_deleted = 0
GROUP BY hcd.category
HAVING __category_id = 0 OR SUM(hp.payout + hp.pending_payout) < __payout_limit OR ( SUM(hp.payout + hp.pending_payout) = __payout_limit AND hcd.category > _category )

View File

@ -1,5 +1,5 @@
DROP TYPE IF EXISTS database_api_post CASCADE;
CREATE TYPE database_api_post AS (
DROP TYPE IF EXISTS hivemind_app.database_api_post CASCADE;
CREATE TYPE hivemind_app.database_api_post AS (
id INT,
community_id INT,
author VARCHAR(16),
@ -42,12 +42,12 @@ CREATE TYPE database_api_post AS (
author_rewards BIGINT
);
DROP FUNCTION IF EXISTS list_comments_by_permlink(character varying, character varying, int);
CREATE OR REPLACE FUNCTION list_comments_by_permlink(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_permlink(character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_permlink(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
BEGIN
@ -57,8 +57,8 @@ BEGIN
SELECT
hph.id,
hph.author_s_permlink
FROM hive_posts_api_helper hph
JOIN live_posts_comments_view hp ON hp.id = hph.id
FROM hivemind_app.hive_posts_api_helper hph
JOIN hivemind_app.live_posts_comments_view hp ON hp.id = hph.id
WHERE hph.author_s_permlink >= _author || '/' || _permlink
AND NOT hp.is_muted -- all the mute checks in this file look insufficient, but maybe no one uses these API calls?
AND hph.id != 0 -- what does this do?
@ -75,33 +75,33 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY hp.author, hp.permlink
LIMIT _limit;
END;
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS list_comments_by_cashout_time(timestamp, character varying, character varying, int);
CREATE OR REPLACE FUNCTION list_comments_by_cashout_time(
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_cashout_time(timestamp, character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_cashout_time(
in _cashout_time timestamp,
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id(_author,_permlink, True);
__post_id = hivemind_app.find_comment_id(_author,_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_cashout_time
(
SELECT
hp1.id,
hp1.cashout_time
FROM live_posts_comments_view hp1
FROM hivemind_app.live_posts_comments_view hp1
WHERE NOT hp1.is_muted
AND hp1.cashout_time > _cashout_time
OR hp1.cashout_time = _cashout_time
@ -121,7 +121,7 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.cashout_time ASC, comments.id ASC
LIMIT _limit
;
@ -129,27 +129,27 @@ END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS list_comments_by_root(character varying, character varying, character varying, character varying, int);
CREATE OR REPLACE FUNCTION list_comments_by_root(
in _root_author hive_accounts.name%TYPE,
in _root_permlink hive_permlink_data.permlink%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_root(character varying, character varying, character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_root(
in _root_author hivemind_app.hive_accounts.name%TYPE,
in _root_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__root_id INT;
__post_id INT;
BEGIN
__root_id = find_comment_id(_root_author, _root_permlink, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
__root_id = hivemind_app.find_comment_id(_root_author, _root_permlink, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_root
(
SELECT hp.id
FROM live_posts_comments_view hp
FROM hivemind_app.live_posts_comments_view hp
WHERE hp.root_id = __root_id
AND NOT hp.is_muted
AND (__post_id = 0 OR hp.id >= __post_id)
@ -166,34 +166,34 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.id
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS list_comments_by_parent(character varying, character varying, character varying, character varying, int)
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_parent(character varying, character varying, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_parent(
in _parent_author hive_accounts.name%TYPE,
in _parent_permlink hive_permlink_data.permlink%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_parent(
in _parent_author hivemind_app.hive_accounts.name%TYPE,
in _parent_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS $function$
DECLARE
__post_id INT;
__parent_id INT;
BEGIN
__parent_id = find_comment_id(_parent_author, _parent_permlink, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
__parent_id = hivemind_app.find_comment_id(_parent_author, _parent_permlink, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_parent
(
SELECT hp.id
FROM live_posts_comments_view hp
FROM hivemind_app.live_posts_comments_view hp
WHERE hp.parent_id = __parent_id
AND NOT hp.is_muted
AND (__post_id = 0 OR hp.id >= __post_id)
@ -210,38 +210,38 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.id
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS list_comments_by_last_update(character varying, timestamp, character varying, character varying, int)
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_last_update(
in _parent_author hive_accounts.name%TYPE,
in _updated_at hive_posts.updated_at%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_last_update(
in _parent_author hivemind_app.hive_accounts.name%TYPE,
in _updated_at hivemind_app.hive_posts.updated_at%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__post_id INT;
__parent_author_id INT;
BEGIN
__parent_author_id = find_account_id(_parent_author, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
__parent_author_id = hivemind_app.find_account_id(_parent_author, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_last_update
(
SELECT
hp1.id,
hp1.updated_at
FROM live_posts_comments_view hp1
JOIN hive_posts hp2 ON hp1.parent_id = hp2.id
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_posts hp2 ON hp1.parent_id = hp2.id
WHERE hp2.author_id = __parent_author_id
AND NOT hp1.is_muted
AND (
@ -261,37 +261,37 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.updated_at DESC, comments.id ASC
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS list_comments_by_author_last_update(character varying, timestamp, character varying, character varying, int)
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_author_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_author_last_update(
in _author hive_accounts.name%TYPE,
in _updated_at hive_posts.updated_at%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_author_last_update(
in _author hivemind_app.hive_accounts.name%TYPE,
in _updated_at hivemind_app.hive_posts.updated_at%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__author_id INT;
__post_id INT;
BEGIN
__author_id = find_account_id(_author, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
__author_id = hivemind_app.find_account_id(_author, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_author_last_update
(
SELECT
hp1.id,
hp1.updated_at
FROM live_posts_comments_view hp1
FROM hivemind_app.live_posts_comments_view hp1
WHERE hp1.author_id = __author_id
AND NOT hp1.is_muted
AND (
@ -312,7 +312,7 @@ BEGIN
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM comments,
LATERAL get_post_view_by_id(comments.id) hp
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.updated_at DESC, comments.id ASC
LIMIT _limit;
END

View File

@ -1,6 +1,6 @@
DROP TYPE IF EXISTS database_api_vote CASCADE;
DROP TYPE IF EXISTS hivemind_app.database_api_vote CASCADE;
CREATE TYPE database_api_vote AS (
CREATE TYPE hivemind_app.database_api_vote AS (
id BIGINT,
voter VARCHAR(16),
author VARCHAR(16),
@ -13,21 +13,21 @@ CREATE TYPE database_api_vote AS (
reputation BIGINT
);
DROP FUNCTION IF EXISTS find_votes( character varying, character varying, int )
DROP FUNCTION IF EXISTS hivemind_app.find_votes( character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.find_votes
CREATE OR REPLACE FUNCTION hivemind_app.find_votes
(
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _AUTHOR hivemind_app.hive_accounts.name%TYPE,
in _PERMLINK hivemind_app.hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
RETURNS SETOF hivemind_app.database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
DECLARE _POST_ID INT;
BEGIN
_POST_ID = find_comment_id( _AUTHOR, _PERMLINK, True);
_POST_ID = hivemind_app.find_comment_id( _AUTHOR, _PERMLINK, True);
RETURN QUERY
(
@ -43,7 +43,7 @@ RETURN QUERY
v.num_changes,
v.reputation
FROM
hive_votes_view v
hivemind_app.hive_votes_view v
WHERE
v.post_id = _POST_ID
ORDER BY
@ -54,16 +54,16 @@ RETURN QUERY
END
$function$;
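-- A minimal, hedged example call (author and permlink are placeholders):
SELECT * FROM hivemind_app.find_votes('alice', 'first-post', 1000);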
DROP FUNCTION IF EXISTS list_votes_by_voter_comment( character varying, character varying, character varying, int )
DROP FUNCTION IF EXISTS hivemind_app.list_votes_by_voter_comment( character varying, character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.list_votes_by_voter_comment
CREATE OR REPLACE FUNCTION hivemind_app.list_votes_by_voter_comment
(
in _VOTER hive_accounts.name%TYPE,
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _VOTER hivemind_app.hive_accounts.name%TYPE,
in _AUTHOR hivemind_app.hive_accounts.name%TYPE,
in _PERMLINK hivemind_app.hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
RETURNS SETOF hivemind_app.database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
@ -71,8 +71,8 @@ DECLARE __voter_id INT;
DECLARE __post_id INT;
BEGIN
__voter_id = find_account_id( _VOTER, True );
__post_id = find_comment_id( _AUTHOR, _PERMLINK, True );
__voter_id = hivemind_app.find_account_id( _VOTER, True );
__post_id = hivemind_app.find_comment_id( _AUTHOR, _PERMLINK, True );
RETURN QUERY
(
@ -88,7 +88,7 @@ RETURN QUERY
v.num_changes,
v.reputation
FROM
hive_votes_view v
hivemind_app.hive_votes_view v
WHERE
v.voter_id = __voter_id
AND v.post_id >= __post_id
@ -100,16 +100,16 @@ RETURN QUERY
END
$function$;
DROP FUNCTION IF EXISTS list_votes_by_comment_voter( character varying, character varying, character varying, int )
DROP FUNCTION IF EXISTS hivemind_app.list_votes_by_comment_voter( character varying, character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.list_votes_by_comment_voter
CREATE OR REPLACE FUNCTION hivemind_app.list_votes_by_comment_voter
(
in _VOTER hive_accounts.name%TYPE,
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _VOTER hivemind_app.hive_accounts.name%TYPE,
in _AUTHOR hivemind_app.hive_accounts.name%TYPE,
in _PERMLINK hivemind_app.hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
RETURNS SETOF hivemind_app.database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
@ -117,8 +117,8 @@ DECLARE __voter_id INT;
DECLARE __post_id INT;
BEGIN
__voter_id = find_account_id( _VOTER, True );
__post_id = find_comment_id( _AUTHOR, _PERMLINK, True );
__voter_id = hivemind_app.find_account_id( _VOTER, True );
__post_id = hivemind_app.find_comment_id( _AUTHOR, _PERMLINK, True );
RETURN QUERY
(
@ -134,7 +134,7 @@ RETURN QUERY
v.num_changes,
v.reputation
FROM
hive_votes_view v
hivemind_app.hive_votes_view v
WHERE
v.post_id = __post_id
AND v.voter_id >= __voter_id

View File

@ -3,11 +3,12 @@
set -e
set -o pipefail
echo "Usage ./db_upgrade.sh <user-name> <db-name>"
echo "Usage ./db_upgrade.sh <postgresql_url>"
rm -f ./upgrade.log
for sql in upgrade/assert_public_schema.sql \
postgres_handle_view_changes.sql \
#upgrade/assert_public_schema.sql \
for sql in postgres_handle_view_changes.sql \
upgrade/upgrade_table_schema.sql \
utility_functions.sql \
hive_accounts_view.sql \
@ -78,12 +79,12 @@ for sql in upgrade/assert_public_schema.sql \
update_table_statistics.sql # Must be last
do
echo Executing psql -U $1 -d $2 -f $sql
time psql -a -1 -v "ON_ERROR_STOP=1" -U $1 -d $2 -c '\timing' -f $sql 2>&1 | tee -a -i upgrade.log
echo Executing psql "$1" -f $sql
time psql -a -1 -v "ON_ERROR_STOP=1" "$1" -c '\timing' -f $sql 2>&1 | tee -a -i upgrade.log
echo $?
done
time psql -a -v "ON_ERROR_STOP=1" -U $1 -d $2 -c '\timing' -f upgrade/upgrade_runtime_migration.sql 2>&1 | tee -a -i upgrade.log
time psql -a -v "ON_ERROR_STOP=1" "$1" -c '\timing' -f upgrade/upgrade_runtime_migration.sql 2>&1 | tee -a -i upgrade.log
time psql -a -v "ON_ERROR_STOP=1" -U $1 -d $2 -c '\timing' -f upgrade/do_conditional_vacuum.sql 2>&1 | tee -a -i upgrade.log
time psql -a -v "ON_ERROR_STOP=1" "$1" -c '\timing' -f upgrade/do_conditional_vacuum.sql 2>&1 | tee -a -i upgrade.log
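After this change the script takes a single libpq connection URL instead of separate user/db arguments, e.g. ./db_upgrade.sh 'postgresql://user@host:5432/dbname' (placeholder values).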

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS delete_hive_posts_mentions();
DROP FUNCTION IF EXISTS hivemind_app.delete_hive_posts_mentions();
CREATE OR REPLACE FUNCTION delete_hive_posts_mentions()
CREATE OR REPLACE FUNCTION hivemind_app.delete_hive_posts_mentions()
RETURNS VOID
LANGUAGE 'plpgsql'
AS
@ -9,9 +9,9 @@ DECLARE
__90_days_beyond_head_block_number INTEGER;
BEGIN
__90_days_beyond_head_block_number = block_before_head('90 days'::interval);
__90_days_beyond_head_block_number = hivemind_app.block_before_head('90 days'::interval);
DELETE FROM hive_mentions
DELETE FROM hivemind_app.hive_mentions
WHERE block_num < __90_days_beyond_head_block_number;
END

View File

@ -1,11 +1,11 @@
DROP FUNCTION IF EXISTS delete_reblog_feed_cache(character varying,character varying,character varying)
DROP FUNCTION IF EXISTS hivemind_app.delete_reblog_feed_cache(character varying,character varying,character varying)
;
CREATE OR REPLACE FUNCTION delete_reblog_feed_cache(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _account hive_accounts.name%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.delete_reblog_feed_cache(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _account hivemind_app.hive_accounts.name%TYPE)
RETURNS INTEGER
LANGUAGE plpgsql
AS
@ -15,17 +15,17 @@ DECLARE
__post_id INT;
BEGIN
__account_id = find_account_id( _account, False );
__post_id = find_comment_id( _author, _permlink, False );
__account_id = hivemind_app.find_account_id( _account, False );
__post_id = hivemind_app.find_comment_id( _author, _permlink, False );
IF __post_id = 0 THEN
RETURN 0;
END IF;
DELETE FROM hive_reblogs
DELETE FROM hivemind_app.hive_reblogs
WHERE blogger_id = __account_id AND post_id = __post_id;
DELETE FROM hive_feed_cache
DELETE FROM hivemind_app.hive_feed_cache
WHERE account_id = __account_id AND post_id = __post_id;
RETURN 1;

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS follow_reset_blacklist(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_blacklist(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_blacklist(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_blacklist(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -8,17 +8,17 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_blacklist
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_blacklist
SET blacklisted = false, block_num = _block_num
WHERE hf.follower = __account_id AND hf.blacklisted;
END
$function$
;
DROP FUNCTION IF EXISTS follow_reset_following_list(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_following_list(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_following_list(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_following_list(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -26,17 +26,17 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_following_list
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_following_list
SET state = 0, block_num = _block_num
WHERE hf.follower = __account_id AND hf.state = 1;
END
$function$
;
DROP FUNCTION IF EXISTS follow_reset_muted_list(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_muted_list(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_muted_list(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_muted_list(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -44,17 +44,17 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_muted_list
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_muted_list
SET state = 0, block_num = _block_num
WHERE hf.follower = __account_id AND hf.state = 2;
END
$function$
;
DROP FUNCTION IF EXISTS follow_reset_follow_blacklist(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_follow_blacklist(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_follow_blacklist(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_follow_blacklist(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -62,17 +62,17 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_follow_blacklist
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_follow_blacklist
SET follow_blacklists = false, block_num = _block_num
WHERE hf.follower = __account_id AND hf.follow_blacklists;
END
$function$
;
DROP FUNCTION IF EXISTS follow_reset_follow_muted_list(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_follow_muted_list(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_follow_muted_list(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_follow_muted_list(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -80,17 +80,17 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_follow_muted_list
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_follow_muted_list
SET follow_muted = false, block_num = _block_num
WHERE hf.follower = __account_id AND hf.follow_muted;
END
$function$
;
DROP FUNCTION IF EXISTS follow_reset_all_lists(character varying, integer)
DROP FUNCTION IF EXISTS hivemind_app.follow_reset_all_lists(character varying, integer)
;
CREATE OR REPLACE FUNCTION follow_reset_all_lists(in _follower hive_accounts.name%TYPE, in _block_num hive_follows.block_num%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.follow_reset_all_lists(in _follower hivemind_app.hive_accounts.name%TYPE, in _block_num hivemind_app.hive_follows.block_num%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -98,8 +98,8 @@ $function$
DECLARE
__account_id INT;
BEGIN
__account_id = find_account_id( _follower, False );
UPDATE hive_follows hf -- follow_reset_all_lists
__account_id = hivemind_app.find_account_id( _follower, False );
UPDATE hivemind_app.hive_follows hf -- follow_reset_all_lists
SET blacklisted = false, follow_blacklists = false, follow_muted = false, state = 0, block_num = _block_num
WHERE hf.follower = __account_id;
END

View File

@ -1,6 +1,6 @@
DROP TYPE IF EXISTS get_post_view_by_id_return_t CASCADE;
DROP TYPE IF EXISTS hivemind_app.get_post_view_by_id_return_t CASCADE;
CREATE TYPE get_post_view_by_id_return_t AS(
CREATE TYPE hivemind_app.get_post_view_by_id_return_t AS(
id integer,
community_id integer,
root_id integer,
@ -66,7 +66,7 @@ CREATE TYPE get_post_view_by_id_return_t AS(
block_num integer
);
CREATE OR REPLACE FUNCTION get_post_view_by_id(_id hive_posts.id%TYPE) RETURNS SETOF get_post_view_by_id_return_t
CREATE OR REPLACE FUNCTION hivemind_app.get_post_view_by_id(_id hivemind_app.hive_posts.id%TYPE) RETURNS SETOF hivemind_app.get_post_view_by_id_return_t
AS $function$
BEGIN
RETURN QUERY
@ -141,25 +141,25 @@ BEGIN
hc.title AS community_title,
hc.name AS community_name,
hp.block_num
FROM hive_posts hp
FROM hivemind_app.hive_posts hp
-- post data (6 joins)
JOIN hive_accounts_view ha_a ON ha_a.id = hp.author_id
JOIN hive_category_data hcd ON hcd.id = hp.category_id
JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
LEFT JOIN hive_communities hc ON hp.community_id = hc.id
LEFT JOIN hive_roles hr ON hp.author_id = hr.account_id AND hp.community_id = hr.community_id
JOIN hivemind_app.hive_accounts_view ha_a ON ha_a.id = hp.author_id
JOIN hivemind_app.hive_category_data hcd ON hcd.id = hp.category_id
JOIN hivemind_app.hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
LEFT JOIN hivemind_app.hive_communities hc ON hp.community_id = hc.id
LEFT JOIN hivemind_app.hive_roles hr ON hp.author_id = hr.account_id AND hp.community_id = hr.community_id
-- parent post data
JOIN hive_posts pp ON pp.id = hp.parent_id -- parent post (0 or 1 parent)
JOIN hive_accounts ha_pp ON ha_pp.id = pp.author_id
JOIN hive_permlink_data hpd_pp ON hpd_pp.id = pp.permlink_id
JOIN hivemind_app.hive_posts pp ON pp.id = hp.parent_id -- parent post (0 or 1 parent)
JOIN hivemind_app.hive_accounts ha_pp ON ha_pp.id = pp.author_id
JOIN hivemind_app.hive_permlink_data hpd_pp ON hpd_pp.id = pp.permlink_id
-- root post data
JOIN hive_posts rp ON rp.id = hp.root_id -- root_post (0 or 1 root)
JOIN hive_accounts ha_rp ON ha_rp.id = rp.author_id
JOIN hive_permlink_data hpd_rp ON hpd_rp.id = rp.permlink_id
JOIN hive_category_data rcd ON rcd.id = rp.category_id
JOIN hive_post_data rpd ON rpd.id = rp.id
JOIN hivemind_app.hive_posts rp ON rp.id = hp.root_id -- root_post (0 or 1 root)
JOIN hivemind_app.hive_accounts ha_rp ON ha_rp.id = rp.author_id
JOIN hivemind_app.hive_permlink_data hpd_rp ON hpd_rp.id = rp.permlink_id
JOIN hivemind_app.hive_category_data rcd ON rcd.id = rp.category_id
JOIN hivemind_app.hive_post_data rpd ON rpd.id = rp.id
-- largest joined data
JOIN hive_post_data hpd ON hpd.id = hp.id
JOIN hivemind_app.hive_post_data hpd ON hpd.id = hp.id
WHERE hp.id = _id AND hp.counter_deleted = 0;
END;
$function$ LANGUAGE plpgsql STABLE SET join_collapse_limit = 1;

View File

@ -0,0 +1,66 @@
DROP TYPE IF EXISTS hivemind_app.hive_api_operation CASCADE;
CREATE TYPE hivemind_app.hive_api_operation AS (
id BIGINT,
block_num INT,
operation_type_id SMALLINT,
is_virtual BOOLEAN,
body VARCHAR
);
CREATE OR REPLACE FUNCTION hivemind_app.enum_operations4hivemind(in _first_block INT, in _last_block INT)
RETURNS SETOF hivemind_app.hive_api_operation
AS
$function$
BEGIN
/** Hivemind requires only the following kinds of virtual operations:
author_reward_operation = 51
comment_reward_operation = 53
effective_comment_vote_operation = 72
comment_payout_update_operation = 61
ineffective_delete_comment_operation = 73
*/
RETURN QUERY -- enum_operations4hivemind
SELECT ho.id, ho.block_num, ho.op_type_id, ho.op_type_id >= 50 AS is_virtual, ho.body::VARCHAR
FROM hive.hivemind_app_operations_view ho
WHERE ho.block_num BETWEEN _first_block AND _last_block
AND (ho.op_type_id < 50
OR ho.op_type_id in (51, 53, 61, 72, 73)
)
ORDER BY ho.block_num, ho.id
;
END
$function$
LANGUAGE plpgsql STABLE
;
DROP TYPE IF EXISTS hivemind_app.hive_api_hivemind_blocks CASCADE;
CREATE TYPE hivemind_app.hive_api_hivemind_blocks AS (
num INTEGER,
hash BYTEA,
prev BYTEA,
date TEXT
);
CREATE OR REPLACE FUNCTION hivemind_app.enum_blocks4hivemind(in _first_block INT, in _last_block INT)
RETURNS SETOF hivemind_app.hive_api_hivemind_blocks
AS
$function$
BEGIN
RETURN QUERY
SELECT -- hivemind_app.hive_api_hivemind_blocks
hb.num
, hb.hash
, hb.prev as prev
, to_char( created_at, 'YYYY-MM-DDThh24:MI:SS' ) as date
FROM hive.hivemind_app_blocks_view hb
WHERE hb.num BETWEEN _first_block AND _last_block
ORDER by hb.num
;
END
$function$
LANGUAGE plpgsql STABLE
;
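-- Both helpers can be exercised directly; the block range below is purely illustrative:
-- Operations hivemind consumes in blocks 1..100 (all real ops plus the whitelisted virtual ones):
SELECT id, block_num, operation_type_id, is_virtual
FROM hivemind_app.enum_operations4hivemind(1, 100);
-- Matching block headers for the same range:
SELECT num, date FROM hivemind_app.enum_blocks4hivemind(1, 100);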

View File

@ -1,19 +1,19 @@
DROP FUNCTION IF EXISTS head_block_time CASCADE;
CREATE OR REPLACE FUNCTION head_block_time()
DROP FUNCTION IF EXISTS hivemind_app.head_block_time CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.head_block_time()
RETURNS TIMESTAMP
LANGUAGE 'sql' STABLE
AS
$BODY$
SELECT hb.created_at FROM hive_blocks hb ORDER BY hb.num DESC LIMIT 1
SELECT last_imported_block_date FROM hivemind_app.hive_state LIMIT 1
$BODY$
;
DROP FUNCTION IF EXISTS block_before_head CASCADE;
CREATE OR REPLACE FUNCTION block_before_head( in _time INTERVAL )
RETURNS hive_blocks.num%TYPE
DROP FUNCTION IF EXISTS hivemind_app.block_before_head CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.block_before_head( in _time INTERVAL )
RETURNS hive.hivemind_app_blocks_view.num%TYPE
LANGUAGE 'sql' STABLE
AS
$BODY$
SELECT MAX(hb1.num) - CAST( extract(epoch from _time)/3 as INTEGER ) FROM hive_blocks hb1
SELECT last_imported_block_num - CAST( extract(epoch from _time)/3 as INTEGER ) FROM hivemind_app.hive_state LIMIT 1
$BODY$
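-- Given Hive's 3-second block interval, dividing the interval's seconds by 3
-- converts it to a block count; '90 days' is 7776000 seconds, i.e. 2592000 blocks:
-- Returns last_imported_block_num - 2592000 for a 90-day window.
SELECT hivemind_app.block_before_head('90 days'::interval);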

View File

@ -1,5 +1,5 @@
DROP VIEW IF EXISTS hive_accounts_info_view_lite CASCADE;
CREATE OR REPLACE VIEW public.hive_accounts_info_view_lite
DROP VIEW IF EXISTS hivemind_app.hive_accounts_info_view_lite CASCADE;
CREATE OR REPLACE VIEW hivemind_app.hive_accounts_info_view_lite
AS
SELECT ha.id,
ha.name,
@ -12,17 +12,17 @@ CREATE OR REPLACE VIEW public.hive_accounts_info_view_lite
ha.lastread_at,
ha.posting_json_metadata,
ha.json_metadata
FROM hive_accounts ha
FROM hivemind_app.hive_accounts ha
LEFT JOIN LATERAL
(
SELECT COUNT(1) AS post_count
FROM hive_posts hp
FROM hivemind_app.hive_posts hp
WHERE hp.counter_deleted = 0 and hp.author_id = ha.id
) posts ON true
;
DROP VIEW IF EXISTS hive_accounts_info_view;
CREATE OR REPLACE VIEW public.hive_accounts_info_view
DROP VIEW IF EXISTS hivemind_app.hive_accounts_info_view;
CREATE OR REPLACE VIEW hivemind_app.hive_accounts_info_view
AS
SELECT ha.id,
ha.name,
@ -40,18 +40,18 @@ CREATE OR REPLACE VIEW public.hive_accounts_info_view
ha.lastread_at,
ha.posting_json_metadata,
ha.json_metadata
FROM hive_accounts_info_view_lite ha
FROM hivemind_app.hive_accounts_info_view_lite ha
LEFT JOIN LATERAL
(
SELECT hp1.created_at AS latest_post
FROM hive_posts hp1
FROM hivemind_app.hive_posts hp1
WHERE hp1.counter_deleted = 0 and hp1.author_id = ha.id
ORDER BY hp1.created_at DESC, hp1.author_id DESC LIMIT 1
) latest_post on true
LEFT JOIN LATERAL
(
SELECT hvf.last_update AS latest_vote
FROM hive_votes hvf
FROM hivemind_app.hive_votes hvf
WHERE hvf.voter_id = ha.id
ORDER BY hvf.voter_id DESC, hvf.last_update DESC LIMIT 1
) whole_votes ON true

View File

@ -1,6 +1,6 @@
DROP VIEW IF EXISTS public.hive_accounts_view CASCADE;
DROP VIEW IF EXISTS hivemind_app.hive_accounts_view CASCADE;
CREATE OR REPLACE VIEW public.hive_accounts_view
CREATE OR REPLACE VIEW hivemind_app.hive_accounts_view
AS
SELECT id,
name,
@ -14,5 +14,5 @@ SELECT id,
posting_json_metadata,
json_metadata,
( reputation <= -464800000000 ) is_grayed -- largest value for which rep_log10 is < 1.0
FROM hive_accounts
FROM hivemind_app.hive_accounts
;
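-- The -464800000000 threshold can be sanity-checked, assuming the
-- condenser-style formula rep_log10 = 25 + sign(rep) * 9 * (log10(|rep|) - 9):
-- log10(464800000000) ~ 11.667, so 25 - 9 * (11.667 - 9) ~ 0.994, just below 1.0:
SELECT 25 - 9 * (log(464800000000::numeric) - 9);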

View File

@ -1,22 +1,22 @@
DROP VIEW IF EXISTS blacklisted_by_observer_view;
CREATE OR REPLACE VIEW blacklisted_by_observer_view AS
DROP VIEW IF EXISTS hivemind_app.blacklisted_by_observer_view;
CREATE OR REPLACE VIEW hivemind_app.blacklisted_by_observer_view AS
SELECT observer_accounts.id AS observer_id,
following_accounts.id AS blacklisted_id,
following_accounts.name AS blacklisted_name,
'my blacklist'::text AS source
FROM ((hive_follows
JOIN hive_accounts following_accounts ON ((hive_follows.following = following_accounts.id)))
JOIN hive_accounts observer_accounts ON ((hive_follows.follower = observer_accounts.id)))
WHERE hive_follows.blacklisted
FROM ((hivemind_app.hive_follows
JOIN hivemind_app.hive_accounts following_accounts ON ((hive_follows.following = following_accounts.id)))
JOIN hivemind_app.hive_accounts observer_accounts ON ((hive_follows.follower = observer_accounts.id)))
WHERE hivemind_app.hive_follows.blacklisted
UNION ALL
SELECT observer_accounts.id AS observer_id,
following_accounts.id AS blacklisted_id,
following_accounts.name AS blacklisted_name,
string_agg(('blacklisted by '::text || (indirect_accounts.name)::text), ','::text ORDER BY indirect_accounts.name) AS source
FROM (((hive_follows hive_follows_direct
JOIN hive_follows hive_follows_indirect ON ((hive_follows_direct.following = hive_follows_indirect.follower)))
JOIN hive_accounts following_accounts ON ((hive_follows_indirect.following = following_accounts.id)))
JOIN hive_accounts observer_accounts ON ((hive_follows_direct.follower = observer_accounts.id)))
JOIN hive_accounts indirect_accounts ON ((hive_follows_indirect.follower = indirect_accounts.id))
FROM (((hivemind_app.hive_follows hive_follows_direct
JOIN hivemind_app.hive_follows hive_follows_indirect ON ((hive_follows_direct.following = hive_follows_indirect.follower)))
JOIN hivemind_app.hive_accounts following_accounts ON ((hive_follows_indirect.following = following_accounts.id)))
JOIN hivemind_app.hive_accounts observer_accounts ON ((hive_follows_direct.follower = observer_accounts.id)))
JOIN hivemind_app.hive_accounts indirect_accounts ON ((hive_follows_indirect.follower = indirect_accounts.id))
WHERE (hive_follows_direct.follow_blacklists AND hive_follows_indirect.blacklisted)
GROUP BY observer_accounts.id, following_accounts.id;

View File

@ -1,16 +1,16 @@
DROP VIEW IF EXISTS muted_accounts_by_id_view CASCADE;
CREATE OR REPLACE VIEW muted_accounts_by_id_view AS
DROP VIEW IF EXISTS hivemind_app.muted_accounts_by_id_view CASCADE;
CREATE OR REPLACE VIEW hivemind_app.muted_accounts_by_id_view AS
SELECT observer_accounts.id AS observer_id,
following_accounts.id AS muted_id
FROM ((hive_follows
JOIN hive_accounts following_accounts ON ((hive_follows.following = following_accounts.id)))
JOIN hive_accounts observer_accounts ON ((hive_follows.follower = observer_accounts.id)))
FROM ((hivemind_app.hive_follows
JOIN hivemind_app.hive_accounts following_accounts ON ((hive_follows.following = following_accounts.id)))
JOIN hivemind_app.hive_accounts observer_accounts ON ((hive_follows.follower = observer_accounts.id)))
WHERE (hive_follows.state = 2)
UNION
SELECT observer_accounts.id AS observer_id,
following_accounts.id AS muted_id
FROM (((hive_follows hive_follows_direct
JOIN hive_follows hive_follows_indirect ON ((hive_follows_direct.following = hive_follows_indirect.follower)))
JOIN hive_accounts following_accounts ON ((hive_follows_indirect.following = following_accounts.id)))
JOIN hive_accounts observer_accounts ON ((hive_follows_direct.follower = observer_accounts.id)))
FROM (((hivemind_app.hive_follows hive_follows_direct
JOIN hivemind_app.hive_follows hive_follows_indirect ON ((hive_follows_direct.following = hive_follows_indirect.follower)))
JOIN hivemind_app.hive_accounts following_accounts ON ((hive_follows_indirect.following = following_accounts.id)))
JOIN hivemind_app.hive_accounts observer_accounts ON ((hive_follows_direct.follower = observer_accounts.id)))
WHERE (hive_follows_direct.follow_muted AND (hive_follows_indirect.state = 2));

View File

@ -1,17 +1,17 @@
DROP VIEW IF EXISTS muted_accounts_view;
CREATE OR REPLACE VIEW muted_accounts_view AS
DROP VIEW IF EXISTS hivemind_app.muted_accounts_view;
CREATE OR REPLACE VIEW hivemind_app.muted_accounts_view AS
(
SELECT observer_accounts.name AS observer, following_accounts.name AS muted
FROM hive_follows JOIN hive_accounts following_accounts ON hive_follows.following = following_accounts.id
JOIN hive_accounts observer_accounts ON hive_follows.follower = observer_accounts.id
WHERE hive_follows.state = 2
FROM hivemind_app.hive_follows JOIN hivemind_app.hive_accounts following_accounts ON hivemind_app.hive_follows.following = following_accounts.id
JOIN hivemind_app.hive_accounts observer_accounts ON hivemind_app.hive_follows.follower = observer_accounts.id
WHERE hivemind_app.hive_follows.state = 2
UNION
SELECT observer_accounts.name AS observer, following_accounts.name AS muted
FROM hive_follows hive_follows_direct JOIN hive_follows hive_follows_indirect ON hive_follows_direct.following = hive_follows_indirect.follower
JOIN hive_accounts following_accounts ON hive_follows_indirect.following = following_accounts.id
JOIN hive_accounts observer_accounts ON hive_follows_direct.follower = observer_accounts.id
FROM hivemind_app.hive_follows hive_follows_direct JOIN hivemind_app.hive_follows hive_follows_indirect ON hive_follows_direct.following = hive_follows_indirect.follower
JOIN hivemind_app.hive_accounts following_accounts ON hive_follows_indirect.following = following_accounts.id
JOIN hivemind_app.hive_accounts observer_accounts ON hive_follows_direct.follower = observer_accounts.id
WHERE hive_follows_direct.follow_muted AND hive_follows_indirect.state = 2
);

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS prepare_tags;
CREATE OR REPLACE FUNCTION prepare_tags( in _raw_tags VARCHAR[] )
RETURNS SETOF hive_tag_data.id%TYPE
DROP FUNCTION IF EXISTS hivemind_app.prepare_tags;
CREATE OR REPLACE FUNCTION hivemind_app.prepare_tags( in _raw_tags VARCHAR[] )
RETURNS SETOF hivemind_app.hive_tag_data.id%TYPE
LANGUAGE 'plpgsql'
VOLATILE
AS
@ -18,33 +18,33 @@ BEGIN
END LOOP;
RETURN QUERY INSERT INTO
hive_tag_data AS htd(tag)
hivemind_app.hive_tag_data AS htd(tag)
SELECT UNNEST( __tags )
ON CONFLICT("tag") DO UPDATE SET tag=EXCLUDED.tag --trick to always return id
RETURNING htd.id;
END
$function$;
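-- The DO UPDATE no-op above forces the INSERT to return ids for pre-existing
-- tags as well as new ones; a hedged example (tag values invented):
SELECT * FROM hivemind_app.prepare_tags(ARRAY['photography', 'hive']::VARCHAR[]);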
DROP FUNCTION IF EXISTS process_hive_post_operation;
DROP FUNCTION IF EXISTS hivemind_app.process_hive_post_operation;
;
CREATE OR REPLACE FUNCTION process_hive_post_operation(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _parent_author hive_accounts.name%TYPE,
in _parent_permlink hive_permlink_data.permlink%TYPE,
in _date hive_posts.created_at%TYPE,
in _community_support_start_block hive_posts.block_num%TYPE,
in _block_num hive_posts.block_num%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.process_hive_post_operation(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _parent_author hivemind_app.hive_accounts.name%TYPE,
in _parent_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _date hivemind_app.hive_posts.created_at%TYPE,
in _community_support_start_block hivemind_app.hive_posts.block_num%TYPE,
in _block_num hivemind_app.hive_posts.block_num%TYPE,
in _metadata_tags VARCHAR[])
RETURNS TABLE (is_new_post boolean, id hive_posts.id%TYPE, author_id hive_posts.author_id%TYPE, permlink_id hive_posts.permlink_id%TYPE,
post_category hive_category_data.category%TYPE, parent_id hive_posts.parent_id%TYPE, community_id hive_posts.community_id%TYPE,
is_valid hive_posts.is_valid%TYPE, is_muted hive_posts.is_muted%TYPE, depth hive_posts.depth%TYPE)
RETURNS TABLE (is_new_post boolean, id hivemind_app.hive_posts.id%TYPE, author_id hivemind_app.hive_posts.author_id%TYPE, permlink_id hivemind_app.hive_posts.permlink_id%TYPE,
post_category hivemind_app.hive_category_data.category%TYPE, parent_id hivemind_app.hive_posts.parent_id%TYPE, community_id hivemind_app.hive_posts.community_id%TYPE,
is_valid hivemind_app.hive_posts.is_valid%TYPE, is_muted hivemind_app.hive_posts.is_muted%TYPE, depth hivemind_app.hive_posts.depth%TYPE)
LANGUAGE plpgsql
AS
$function$
BEGIN
INSERT INTO hive_permlink_data
INSERT INTO hivemind_app.hive_permlink_data
(permlink)
values
(
@ -53,17 +53,17 @@ _permlink
ON CONFLICT DO NOTHING
;
if _parent_author != '' THEN
RETURN QUERY INSERT INTO hive_posts as hp
RETURN QUERY INSERT INTO hivemind_app.hive_posts as hp
(parent_id, depth, community_id, category_id,
root_id, is_muted, is_valid,
author_id, permlink_id, created_at, updated_at, sc_hot, sc_trend, active, payout_at, cashout_time, counter_deleted, block_num, block_num_created)
SELECT php.id AS parent_id, php.depth + 1 AS depth,
(CASE
WHEN _block_num > _community_support_start_block THEN
COALESCE(php.community_id, (select hc.id from hive_communities hc where hc.name = _parent_permlink))
COALESCE(php.community_id, (select hc.id from hivemind_app.hive_communities hc where hc.name = _parent_permlink))
ELSE NULL
END) AS community_id,
COALESCE(php.category_id, (select hcg.id from hive_category_data hcg where hcg.category = _parent_permlink)) AS category_id,
COALESCE(php.category_id, (select hcg.id from hivemind_app.hive_category_data hcg where hcg.category = _parent_permlink)) AS category_id,
(CASE(php.root_id)
WHEN 0 THEN php.id
ELSE php.root_id
@ -71,15 +71,15 @@ if _parent_author != '' THEN
php.is_muted AS is_muted, php.is_valid AS is_valid,
ha.id AS author_id, hpd.id AS permlink_id, _date AS created_at,
_date AS updated_at,
calculate_time_part_of_hot(_date) AS sc_hot,
calculate_time_part_of_trending(_date) AS sc_trend,
hivemind_app.calculate_time_part_of_hot(_date) AS sc_hot,
hivemind_app.calculate_time_part_of_trending(_date) AS sc_trend,
_date AS active, (_date + INTERVAL '7 days') AS payout_at, (_date + INTERVAL '7 days') AS cashout_time, 0,
_block_num as block_num, _block_num as block_num_created
FROM hive_accounts ha,
hive_permlink_data hpd,
hive_posts php
INNER JOIN hive_accounts pha ON pha.id = php.author_id
INNER JOIN hive_permlink_data phpd ON phpd.id = php.permlink_id
FROM hivemind_app.hive_accounts ha,
hivemind_app.hive_permlink_data hpd,
hivemind_app.hive_posts php
INNER JOIN hivemind_app.hive_accounts pha ON pha.id = php.author_id
INNER JOIN hivemind_app.hive_permlink_data phpd ON phpd.id = php.permlink_id
WHERE pha.name = _parent_author AND phpd.permlink = _parent_permlink AND
ha.name = _author AND hpd.permlink = _permlink AND php.counter_deleted = 0
@ -90,16 +90,16 @@ if _parent_author != '' THEN
updated_at = _date,
active = _date,
block_num = _block_num
RETURNING (xmax = 0) as is_new_post, hp.id, hp.author_id, hp.permlink_id, (SELECT hcd.category FROM hive_category_data hcd WHERE hcd.id = hp.category_id) as post_category, hp.parent_id, hp.community_id, hp.is_valid, hp.is_muted, hp.depth
RETURNING (xmax = 0) as is_new_post, hp.id, hp.author_id, hp.permlink_id, (SELECT hcd.category FROM hivemind_app.hive_category_data hcd WHERE hcd.id = hp.category_id) as post_category, hp.parent_id, hp.community_id, hp.is_valid, hp.is_muted, hp.depth
;
ELSE
INSERT INTO hive_category_data
INSERT INTO hivemind_app.hive_category_data
(category)
VALUES (_parent_permlink)
ON CONFLICT (category) DO NOTHING
;
RETURN QUERY INSERT INTO hive_posts as hp
RETURN QUERY INSERT INTO hivemind_app.hive_posts as hp
(parent_id, depth, community_id, category_id,
root_id, is_muted, is_valid,
author_id, permlink_id, created_at, updated_at, sc_hot, sc_trend,
@ -108,24 +108,24 @@ ELSE
SELECT 0 AS parent_id, 0 AS depth,
(CASE
WHEN _block_num > _community_support_start_block THEN
(select hc.id FROM hive_communities hc WHERE hc.name = _parent_permlink)
(select hc.id FROM hivemind_app.hive_communities hc WHERE hc.name = _parent_permlink)
ELSE NULL
END) AS community_id,
(SELECT hcg.id FROM hive_category_data hcg WHERE hcg.category = _parent_permlink) AS category_id,
(SELECT hcg.id FROM hivemind_app.hive_category_data hcg WHERE hcg.category = _parent_permlink) AS category_id,
0 as root_id, -- the post's own id is used as root when there is no parent
false AS is_muted, true AS is_valid,
ha.id AS author_id, hpd.id AS permlink_id, _date AS created_at,
_date AS updated_at,
calculate_time_part_of_hot(_date) AS sc_hot,
calculate_time_part_of_trending(_date) AS sc_trend,
hivemind_app.calculate_time_part_of_hot(_date) AS sc_hot,
hivemind_app.calculate_time_part_of_trending(_date) AS sc_trend,
_date AS active, (_date + INTERVAL '7 days') AS payout_at, (_date + INTERVAL '7 days') AS cashout_time, 0
, _block_num as block_num, _block_num as block_num_created
, (
SELECT ARRAY_AGG( prepare_tags )
FROM prepare_tags( ARRAY_APPEND(_metadata_tags, _parent_permlink ) )
FROM hivemind_app.prepare_tags( ARRAY_APPEND(_metadata_tags, _parent_permlink ) )
) as tags_ids
FROM hive_accounts ha,
hive_permlink_data hpd
FROM hivemind_app.hive_accounts ha,
hivemind_app.hive_permlink_data hpd
WHERE ha.name = _author and hpd.permlink = _permlink
ON CONFLICT ON CONSTRAINT hive_posts_ux1 DO UPDATE SET
@ -144,13 +144,13 @@ END
$function$
;
DROP FUNCTION if exists delete_hive_post(character varying,character varying,character varying, integer, timestamp)
DROP FUNCTION IF EXISTS hivemind_app.delete_hive_post(character varying,character varying,character varying, integer, timestamp)
;
CREATE OR REPLACE FUNCTION delete_hive_post(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _block_num hive_blocks.num%TYPE,
in _date hive_posts.active%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.delete_hive_post(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _block_num hive.hivemind_app_blocks_view.num%TYPE,
in _date hivemind_app.hive_posts.active%TYPE)
RETURNS VOID
LANGUAGE plpgsql
AS
@ -160,29 +160,29 @@ DECLARE
__post_id INT;
BEGIN
__account_id = find_account_id( _author, False );
__post_id = find_comment_id( _author, _permlink, False );
__account_id = hivemind_app.find_account_id( _author, False );
__post_id = hivemind_app.find_comment_id( _author, _permlink, False );
IF __post_id = 0 THEN
RETURN;
END IF;
UPDATE hive_posts
UPDATE hivemind_app.hive_posts
SET counter_deleted =
(
SELECT max( hps.counter_deleted ) + 1
FROM hive_posts hps
INNER JOIN hive_permlink_data hpd ON hps.permlink_id = hpd.id
FROM hivemind_app.hive_posts hps
INNER JOIN hivemind_app.hive_permlink_data hpd ON hps.permlink_id = hpd.id
WHERE hps.author_id = __account_id AND hpd.permlink = _permlink
)
,block_num = _block_num
,active = _date
WHERE id = __post_id;
DELETE FROM hive_reblogs
DELETE FROM hivemind_app.hive_reblogs
WHERE post_id = __post_id;
DELETE FROM hive_feed_cache
DELETE FROM hivemind_app.hive_feed_cache
WHERE post_id = __post_id AND account_id = __account_id;
END

View File

@ -1,5 +1,5 @@
DROP VIEW IF EXISTS public.hive_posts_base_view cascade;
CREATE OR REPLACE VIEW public.hive_posts_base_view
DROP VIEW IF EXISTS hivemind_app.hive_posts_base_view cascade;
CREATE OR REPLACE VIEW hivemind_app.hive_posts_base_view
AS
SELECT
hp.block_num
@ -10,12 +10,12 @@ SELECT
, hp.pending_payout
, hp.abs_rshares
, hp.vote_rshares AS rshares
FROM hive_posts hp
FROM hivemind_app.hive_posts hp
;
DROP VIEW IF EXISTS public.hive_posts_pp_view CASCADE;
DROP VIEW IF EXISTS hivemind_app.hive_posts_pp_view CASCADE;
CREATE OR REPLACE VIEW public.hive_posts_pp_view
CREATE OR REPLACE VIEW hivemind_app.hive_posts_pp_view
AS
SELECT hp.id,
hp.community_id,
@ -59,9 +59,9 @@ CREATE OR REPLACE VIEW public.hive_posts_pp_view
hp.is_nsfw,
hp.is_valid,
hp.block_num
FROM hive_posts hp
JOIN hive_posts pp ON pp.id = hp.parent_id
JOIN hive_permlink_data hpd_pp ON hpd_pp.id = pp.permlink_id
JOIN hive_category_data hcd ON hcd.id = hp.category_id
FROM hivemind_app.hive_posts hp
JOIN hivemind_app.hive_posts pp ON pp.id = hp.parent_id
JOIN hivemind_app.hive_permlink_data hpd_pp ON hpd_pp.id = pp.permlink_id
JOIN hivemind_app.hive_category_data hcd ON hcd.id = hp.category_id
WHERE hp.counter_deleted = 0 AND hp.id <> 0
;

View File

@ -1,6 +1,6 @@
CREATE OR REPLACE VIEW live_posts_comments_view AS SELECT * FROM hive_posts WHERE counter_deleted = 0 ;
CREATE OR REPLACE VIEW hivemind_app.live_posts_comments_view AS SELECT * FROM hivemind_app.hive_posts WHERE counter_deleted = 0 ;
CREATE OR REPLACE VIEW live_posts_view AS SELECT * FROM live_posts_comments_view WHERE depth = 0;
CREATE OR REPLACE VIEW hivemind_app.live_posts_view AS SELECT * FROM hivemind_app.live_posts_comments_view WHERE depth = 0;
CREATE OR REPLACE VIEW live_comments_view AS SELECT * FROM live_posts_comments_view WHERE depth != 0;
CREATE OR REPLACE VIEW hivemind_app.live_comments_view AS SELECT * FROM hivemind_app.live_posts_comments_view WHERE depth != 0;

View File

@ -1,6 +1,6 @@
DROP VIEW IF EXISTS hive_votes_view
DROP VIEW IF EXISTS hivemind_app.hive_votes_view
;
CREATE OR REPLACE VIEW hive_votes_view
CREATE OR REPLACE VIEW hivemind_app.hive_votes_view
AS
SELECT
hv.id,
@ -18,8 +18,8 @@ SELECT
post_id,
is_effective
FROM
hive_votes hv
INNER JOIN hive_accounts ha_v ON ha_v.id = hv.voter_id
INNER JOIN hive_accounts ha_a ON ha_a.id = hv.author_id
INNER JOIN hive_permlink_data hpd ON hpd.id = hv.permlink_id
hivemind_app.hive_votes hv
INNER JOIN hivemind_app.hive_accounts ha_v ON ha_v.id = hv.voter_id
INNER JOIN hivemind_app.hive_accounts ha_a ON ha_a.id = hv.author_id
INNER JOIN hivemind_app.hive_permlink_data hpd ON hpd.id = hv.permlink_id
;

View File

@ -1,5 +1,5 @@
DROP FUNCTION IF EXISTS date_diff() CASCADE;
CREATE OR REPLACE FUNCTION date_diff (units VARCHAR(30), start_t TIMESTAMP, end_t TIMESTAMP)
DROP FUNCTION IF EXISTS hivemind_app.date_diff() CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.date_diff (units VARCHAR(30), start_t TIMESTAMP, end_t TIMESTAMP)
RETURNS INT AS $$
DECLARE
diff_interval INTERVAL;
@ -41,9 +41,9 @@ $$ LANGUAGE plpgsql IMMUTABLE
;
DROP FUNCTION IF EXISTS public.calculate_time_part_of_trending(_post_created_at hive_posts.created_at%TYPE ) CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_time_part_of_trending(
_post_created_at hive_posts.created_at%TYPE)
DROP FUNCTION IF EXISTS hivemind_app.calculate_time_part_of_trending(_post_created_at hivemind_app.hive_posts.created_at%TYPE ) CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.calculate_time_part_of_trending(
_post_created_at hivemind_app.hive_posts.created_at%TYPE)
RETURNS double precision
LANGUAGE 'plpgsql'
IMMUTABLE
@ -52,7 +52,7 @@ DECLARE
result double precision;
sec_from_epoch INT = 0;
BEGIN
sec_from_epoch = date_diff( 'second', CAST('19700101' AS TIMESTAMP), _post_created_at );
sec_from_epoch = hivemind_app.date_diff( 'second', CAST('19700101' AS TIMESTAMP), _post_created_at );
result = sec_from_epoch/240000.0;
return result;
END;
@ -60,9 +60,9 @@ $BODY$
;
DROP FUNCTION IF EXISTS public.calculate_time_part_of_hot(_post_created_at hive_posts.created_at%TYPE ) CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_time_part_of_hot(
_post_created_at hive_posts.created_at%TYPE)
DROP FUNCTION IF EXISTS hivemind_app.calculate_time_part_of_hot(_post_created_at hivemind_app.hive_posts.created_at%TYPE ) CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.calculate_time_part_of_hot(
_post_created_at hivemind_app.hive_posts.created_at%TYPE)
RETURNS double precision
LANGUAGE 'plpgsql'
IMMUTABLE
@ -71,15 +71,15 @@ DECLARE
result double precision;
sec_from_epoch INT = 0;
BEGIN
sec_from_epoch = date_diff( 'second', CAST('19700101' AS TIMESTAMP), _post_created_at );
sec_from_epoch = hivemind_app.date_diff( 'second', CAST('19700101' AS TIMESTAMP), _post_created_at );
result = sec_from_epoch/10000.0;
return result;
END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_rhsares_part_of_hot_and_trend(_rshares hive_posts.vote_rshares%TYPE) CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_rhsares_part_of_hot_and_trend(_rshares hive_posts.vote_rshares%TYPE)
DROP FUNCTION IF EXISTS hivemind_app.calculate_rhsares_part_of_hot_and_trend(_rshares hivemind_app.hive_posts.vote_rshares%TYPE) CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.calculate_rhsares_part_of_hot_and_trend(_rshares hivemind_app.hive_posts.vote_rshares%TYPE)
RETURNS double precision
LANGUAGE 'plpgsql'
IMMUTABLE
@ -97,30 +97,30 @@ END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_hot(hive_posts.vote_rshares%TYPE, hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION public.calculate_hot(
_rshares hive_posts.vote_rshares%TYPE,
_post_created_at hive_posts.created_at%TYPE)
RETURNS hive_posts.sc_hot%TYPE
DROP FUNCTION IF EXISTS hivemind_app.calculate_hot(hive_posts.vote_rshares%TYPE, hivemind_app.hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION hivemind_app.calculate_hot(
_rshares hivemind_app.hive_posts.vote_rshares%TYPE,
_post_created_at hivemind_app.hive_posts.created_at%TYPE)
RETURNS hivemind_app.hive_posts.sc_hot%TYPE
LANGUAGE 'plpgsql'
IMMUTABLE
AS $BODY$
BEGIN
return calculate_rhsares_part_of_hot_and_trend(_rshares) + calculate_time_part_of_hot( _post_created_at );
return hivemind_app.calculate_rhsares_part_of_hot_and_trend(_rshares) + hivemind_app.calculate_time_part_of_hot( _post_created_at );
END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_trending(hive_posts.vote_rshares%TYPE, hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION public.calculate_trending(
_rshares hive_posts.vote_rshares%TYPE,
_post_created_at hive_posts.created_at%TYPE)
RETURNS hive_posts.sc_trend%TYPE
DROP FUNCTION IF EXISTS hivemind_app.calculate_trending(hive_posts.vote_rshares%TYPE, hivemind_app.hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION hivemind_app.calculate_trending(
_rshares hivemind_app.hive_posts.vote_rshares%TYPE,
_post_created_at hivemind_app.hive_posts.created_at%TYPE)
RETURNS hivemind_app.hive_posts.sc_trend%TYPE
LANGUAGE 'plpgsql'
IMMUTABLE
AS $BODY$
BEGIN
return calculate_rhsares_part_of_hot_and_trend(_rshares) + calculate_time_part_of_trending( _post_created_at );
return hivemind_app.calculate_rhsares_part_of_hot_and_trend(_rshares) + hivemind_app.calculate_time_part_of_trending( _post_created_at );
END;
$BODY$
;
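-- Hedged example calls (the rshares value is invented):
-- Hot and trending scores for a post created now with ~1.5e12 vote rshares:
SELECT hivemind_app.calculate_hot(1500000000000, NOW()::TIMESTAMP);
SELECT hivemind_app.calculate_trending(1500000000000, NOW()::TIMESTAMP);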

View File

@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION is_superuser()
CREATE OR REPLACE FUNCTION hivemind_app.is_superuser()
RETURNS bool
LANGUAGE sql
STABLE

View File

@ -1,7 +1,7 @@
DROP FUNCTION IF EXISTS mutes_get_blacklisted_for_observer;
CREATE FUNCTION mutes_get_blacklisted_for_observer( in _observer VARCHAR, in _flags INTEGER )
DROP FUNCTION IF EXISTS hivemind_app.mutes_get_blacklisted_for_observer;
CREATE FUNCTION hivemind_app.mutes_get_blacklisted_for_observer( in _observer VARCHAR, in _flags INTEGER )
RETURNS TABLE(
account hive_accounts.name%TYPE,
account hivemind_app.hive_accounts.name%TYPE,
source VARCHAR,
is_blacklisted BOOLEAN -- False means muted
)
@ -10,15 +10,15 @@ $function$
DECLARE
__observer_id INT;
BEGIN
__observer_id = find_account_id( _observer, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF (_flags & 1)::BOOLEAN THEN
RETURN QUERY SELECT -- mutes_get_blacklisted_for_observer (local observer blacklist)
ha.name AS account,
_observer AS source,
True
FROM
hive_follows hf
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.blacklisted
ORDER BY account, source;
@ -29,10 +29,10 @@ BEGIN
ha.name AS source,
True
FROM
hive_follows hf
JOIN hive_follows hf_i ON hf_i.follower = hf.following
JOIN hive_accounts ha_i ON ha_i.id = hf_i.following
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_follows hf_i ON hf_i.follower = hf.following
JOIN hivemind_app.hive_accounts ha_i ON ha_i.id = hf_i.following
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.follow_blacklists AND hf_i.blacklisted
ORDER BY account, source;
@ -43,8 +43,8 @@ BEGIN
_observer AS source,
False
FROM
hive_follows hf
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.state = 2
ORDER BY account, source;
@ -55,10 +55,10 @@ BEGIN
ha.name AS source,
False
FROM
hive_follows hf
JOIN hive_follows hf_i ON hf_i.follower = hf.following
JOIN hive_accounts ha_i ON ha_i.id = hf_i.following
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_follows hf_i ON hf_i.follower = hf.following
JOIN hivemind_app.hive_accounts ha_i ON ha_i.id = hf_i.following
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.follow_muted AND hf_i.state = 2
ORDER BY account, source;
@ -67,12 +67,12 @@ END
$function$
language plpgsql STABLE;
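-- A hedged example call; bit 1 of _flags selects the observer's own blacklist
-- as shown above, and the remaining branches suggest further bits enable the
-- other three lists (the value 15 assumes bits 1|2|4|8; account invented):
SELECT * FROM hivemind_app.mutes_get_blacklisted_for_observer('alice', 15);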
DROP FUNCTION IF EXISTS mutes_get_blacklists_for_observer;
CREATE FUNCTION mutes_get_blacklists_for_observer( in _observer VARCHAR, in _follow_blacklist BOOLEAN, in _follow_muted BOOLEAN )
DROP FUNCTION IF EXISTS hivemind_app.mutes_get_blacklists_for_observer;
CREATE FUNCTION hivemind_app.mutes_get_blacklists_for_observer( in _observer VARCHAR, in _follow_blacklist BOOLEAN, in _follow_muted BOOLEAN )
RETURNS TABLE(
list hive_accounts.name%TYPE,
posting_json_metadata hive_accounts.name%TYPE,
json_metadata hive_accounts.name%TYPE,
list hivemind_app.hive_accounts.name%TYPE,
posting_json_metadata hivemind_app.hive_accounts.name%TYPE,
json_metadata hivemind_app.hive_accounts.name%TYPE,
is_blacklist BOOLEAN -- False means mute list
)
AS
@ -80,7 +80,7 @@ $function$
DECLARE
__observer_id INT;
BEGIN
__observer_id = find_account_id( _observer, True );
__observer_id = hivemind_app.find_account_id( _observer, True );
IF _follow_blacklist THEN
RETURN QUERY SELECT -- mutes_get_blacklists_for_observer (observer blacklists)
ha.name AS list,
@ -88,8 +88,8 @@ BEGIN
ha.json_metadata::varchar AS json_metadata,
True as is_blacklist
FROM
hive_follows hf
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.follow_blacklists
ORDER BY list;
@ -101,8 +101,8 @@ BEGIN
ha.json_metadata::VARCHAR AS json_metadata,
False AS is_blacklist
FROM
hive_follows hf
JOIN hive_accounts ha ON ha.id = hf.following
hivemind_app.hive_follows hf
JOIN hivemind_app.hive_accounts ha ON ha.id = hf.following
WHERE
hf.follower = __observer_id AND hf.follow_muted
ORDER BY list;

View File

@ -1,6 +1,6 @@
DROP TYPE IF EXISTS notification CASCADE
DROP TYPE IF EXISTS hivemind_app.notification CASCADE
;
CREATE TYPE notification AS
CREATE TYPE hivemind_app.notification AS
(
id BIGINT
, type_id SMALLINT
@ -16,8 +16,8 @@ CREATE TYPE notification AS
, number_of_mentions INTEGER
);
DROP FUNCTION IF EXISTS get_number_of_unread_notifications;
CREATE OR REPLACE FUNCTION get_number_of_unread_notifications(in _account VARCHAR, in _minimum_score SMALLINT)
DROP FUNCTION IF EXISTS hivemind_app.get_number_of_unread_notifications;
CREATE OR REPLACE FUNCTION hivemind_app.get_number_of_unread_notifications(in _account VARCHAR, in _minimum_score SMALLINT)
RETURNS TABLE( lastread_at TIMESTAMP, unread BIGINT )
LANGUAGE 'plpgsql' STABLE
AS
@ -25,18 +25,18 @@ $BODY$
DECLARE
__account_id INT := 0;
__last_read_at TIMESTAMP;
__last_read_at_block hive_blocks.num%TYPE;
__limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
__last_read_at_block hive.hivemind_app_blocks_view.num%TYPE;
__limit_block hive.hivemind_app_blocks_view.num%TYPE = hivemind_app.block_before_head( '90 days' );
BEGIN
__account_id = find_account_id( _account, True );
__account_id = hivemind_app.find_account_id( _account, True );
SELECT ha.lastread_at INTO __last_read_at
FROM hive_accounts ha
FROM hivemind_app.hive_accounts ha
WHERE ha.id = __account_id;
--- Warning: the given account may have no lastread_at set, so fall back to the block limit to avoid comparing to NULL.
SELECT COALESCE((SELECT hb.num
FROM hive_blocks hb
FROM hive.blocks_view hb -- very important for performance (originally it was a hivemind_app_blocks_view)
WHERE hb.created_at <= __last_read_at
ORDER by hb.created_at desc
LIMIT 1), __limit_block)
@ -45,15 +45,15 @@ BEGIN
RETURN QUERY SELECT
__last_read_at as lastread_at,
count(1) as unread
FROM hive_notification_cache hnv
FROM hivemind_app.hive_notification_cache hnv
WHERE hnv.dst = __account_id AND hnv.block_num > __limit_block AND hnv.block_num > __last_read_at_block AND hnv.score >= _minimum_score
;
END
$BODY$
;
DROP FUNCTION IF EXISTS get_number_of_mentions_in_post;
CREATE OR REPLACE FUNCTION public.get_number_of_mentions_in_post( _post_id hive_posts.id%TYPE )
DROP FUNCTION IF EXISTS hivemind_app.get_number_of_mentions_in_post;
CREATE OR REPLACE FUNCTION hivemind_app.get_number_of_mentions_in_post( _post_id hivemind_app.hive_posts.id%TYPE )
RETURNS INTEGER
LANGUAGE 'plpgsql'
STABLE
@ -62,26 +62,26 @@ $BODY$
DECLARE
__result INTEGER;
BEGIN
SELECT COUNT(*) INTO __result FROM hive_mentions hm WHERE hm.post_id = _post_id;
SELECT COUNT(*) INTO __result FROM hivemind_app.hive_mentions hm WHERE hm.post_id = _post_id;
return __result;
END
$BODY$;
DROP FUNCTION IF EXISTS account_notifications;
CREATE OR REPLACE FUNCTION public.account_notifications(
DROP FUNCTION IF EXISTS hivemind_app.account_notifications;
CREATE OR REPLACE FUNCTION hivemind_app.account_notifications(
_account character varying,
_min_score smallint,
_last_id bigint,
_limit smallint)
RETURNS SETOF notification
RETURNS SETOF hivemind_app.notification
LANGUAGE 'plpgsql'
STABLE
AS $BODY$
DECLARE
__account_id INT;
__limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
__limit_block hive.hivemind_app_blocks_view.num%TYPE = hivemind_app.block_before_head( '90 days' );
BEGIN
__account_id = find_account_id( _account, True );
__account_id = hivemind_app.find_account_id( _account, True );
RETURN QUERY SELECT
hnv.id
, CAST( hnv.type_id as SMALLINT) as type_id
@ -98,20 +98,20 @@ BEGIN
FROM
(
select nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.post_id, nv.score, nv.community, nv.community_title, nv.payload
from hive_notification_cache nv
from hivemind_app.hive_notification_cache nv
WHERE nv.dst = __account_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
ORDER BY nv.id DESC
LIMIT _limit
) hnv
join hive_posts hp on hnv.post_id = hp.id
join hive_accounts ha on hp.author_id = ha.id
join hive_accounts hs on hs.id = hnv.src
join hive_accounts hd on hd.id = hnv.dst
join hive_permlink_data hpd on hp.permlink_id = hpd.id,
join hivemind_app.hive_posts hp on hnv.post_id = hp.id
join hivemind_app.hive_accounts ha on hp.author_id = ha.id
join hivemind_app.hive_accounts hs on hs.id = hnv.src
join hivemind_app.hive_accounts hd on hd.id = hnv.dst
join hivemind_app.hive_permlink_data hpd on hp.permlink_id = hpd.id,
lateral ( SELECT
CASE
WHEN hnv.type_id != 16 THEN 0 -- everything other than mentions (optimization only)
ELSE get_number_of_mentions_in_post( hnv.post_id )
ELSE hivemind_app.get_number_of_mentions_in_post( hnv.post_id )
END as mentions
) as hm
ORDER BY hnv.id DESC
@ -119,17 +119,17 @@ BEGIN
END
$BODY$;
DROP FUNCTION IF EXISTS post_notifications
DROP FUNCTION IF EXISTS hivemind_app.post_notifications
;
CREATE OR REPLACE FUNCTION post_notifications(in _author VARCHAR, in _permlink VARCHAR, in _min_score SMALLINT, in _last_id BIGINT, in _limit SMALLINT)
RETURNS SETOF notification
CREATE OR REPLACE FUNCTION hivemind_app.post_notifications(in _author VARCHAR, in _permlink VARCHAR, in _min_score SMALLINT, in _last_id BIGINT, in _limit SMALLINT)
RETURNS SETOF hivemind_app.notification
AS
$function$
DECLARE
__post_id INT;
__limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
__limit_block hive.hivemind_app_blocks_view.num%TYPE = hivemind_app.block_before_head( '90 days' );
BEGIN
__post_id = find_comment_id(_author, _permlink, True);
__post_id = hivemind_app.find_comment_id(_author, _permlink, True);
RETURN QUERY SELECT
hnv.id
, CAST( hnv.type_id as SMALLINT) as type_id
@ -146,20 +146,20 @@ BEGIN
FROM
(
SELECT nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.score, nv.community, nv.community_title, nv.payload, nv.post_id
FROM hive_notification_cache nv
FROM hivemind_app.hive_notification_cache nv
WHERE nv.dst_post_id = __post_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
ORDER BY nv.id DESC
LIMIT _limit
) hnv
JOIN hive_posts hp ON hnv.post_id = hp.id
JOIN hive_accounts ha ON hp.author_id = ha.id
JOIN hive_accounts hs ON hs.id = hnv.src
JOIN hive_accounts hd ON hd.id = hnv.dst
JOIN hive_permlink_data hpd ON hp.permlink_id = hpd.id,
JOIN hivemind_app.hive_posts hp ON hnv.post_id = hp.id
JOIN hivemind_app.hive_accounts ha ON hp.author_id = ha.id
JOIN hivemind_app.hive_accounts hs ON hs.id = hnv.src
JOIN hivemind_app.hive_accounts hd ON hd.id = hnv.dst
JOIN hivemind_app.hive_permlink_data hpd ON hp.permlink_id = hpd.id,
lateral ( SELECT
CASE
WHEN hnv.type_id != 16 THEN 0 -- everything other than mentions (optimization only)
ELSE get_number_of_mentions_in_post( hnv.post_id )
ELSE hivemind_app.get_number_of_mentions_in_post( hnv.post_id )
END as mentions
) as hm
ORDER BY hnv.id DESC
@ -169,26 +169,26 @@ $function$
LANGUAGE plpgsql STABLE
;
DROP FUNCTION IF EXISTS update_notification_cache;
DROP FUNCTION IF EXISTS hivemind_app.update_notification_cache;
;
CREATE OR REPLACE FUNCTION update_notification_cache(in _first_block_num INT, in _last_block_num INT, in _prune_old BOOLEAN)
CREATE OR REPLACE FUNCTION hivemind_app.update_notification_cache(in _first_block_num INT, in _last_block_num INT, in _prune_old BOOLEAN)
RETURNS VOID
AS
$function$
DECLARE
__limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
__limit_block hive.hivemind_app_blocks_view.num%TYPE = hivemind_app.block_before_head( '90 days' );
BEGIN
IF _first_block_num IS NULL THEN
TRUNCATE TABLE hive_notification_cache;
ALTER SEQUENCE hive_notification_cache_id_seq RESTART WITH 1;
TRUNCATE TABLE hivemind_app.hive_notification_cache;
ALTER SEQUENCE hivemind_app.hive_notification_cache_id_seq RESTART WITH 1;
ELSE
DELETE FROM hive_notification_cache nc WHERE _prune_old AND nc.block_num <= __limit_block;
DELETE FROM hivemind_app.hive_notification_cache nc WHERE _prune_old AND nc.block_num <= __limit_block;
END IF;
INSERT INTO hive_notification_cache
INSERT INTO hivemind_app.hive_notification_cache
(block_num, type_id, created_at, src, dst, dst_post_id, post_id, score, payload, community, community_title)
SELECT nv.block_num, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.post_id, nv.score, nv.payload, nv.community, nv.community_title
FROM hive_raw_notifications_view nv
FROM hivemind_app.hive_raw_notifications_view nv
WHERE nv.block_num > __limit_block AND (_first_block_num IS NULL OR nv.block_num BETWEEN _first_block_num AND _last_block_num)
ORDER BY nv.block_num, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.post_id
;
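A minimal usage sketch for the function above, assuming it is invoked by the indexer after each synced batch (block numbers are illustrative):

-- Full rebuild: a NULL range truncates the cache and restarts the id sequence.
SELECT hivemind_app.update_notification_cache(NULL, NULL, False);
-- Incremental update for one block range, pruning entries older than 90 days:
SELECT hivemind_app.update_notification_cache(64000000, 64001000, True);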

View File

@ -1,6 +1,6 @@
DROP VIEW IF EXISTS public.hive_accounts_rank_view CASCADE;
DROP VIEW IF EXISTS hivemind_app.hive_accounts_rank_view CASCADE;
CREATE OR REPLACE VIEW public.hive_accounts_rank_view
CREATE OR REPLACE VIEW hivemind_app.hive_accounts_rank_view
AS
select ha.id,
case
@ -11,11 +11,11 @@ select ha.id,
WHEN ds.account_rank < 100000 THEN 30
ELSE 20
end AS score
from hive_accounts ha
from hivemind_app.hive_accounts ha
left join
(
SELECT ha3.id, rank() OVER (ORDER BY ha3.reputation DESC) as account_rank
FROM hive_accounts ha3
FROM hivemind_app.hive_accounts ha3
order by ha3.reputation desc
limit 150000
-- Conditions above (related to rank.position) eliminate all records having rank > 100k. So including some
@ -26,9 +26,9 @@ left join
) ds on ds.id = ha.id
;
DROP FUNCTION IF EXISTS public.calculate_notify_vote_score(_payout hive_posts.payout%TYPE, _abs_rshares hive_posts.abs_rshares%TYPE, _rshares hive_votes.rshares%TYPE) CASCADE
DROP FUNCTION IF EXISTS hivemind_app.calculate_notify_vote_score(_payout hivemind_app.hive_posts.payout%TYPE, _abs_rshares hivemind_app.hive_posts.abs_rshares%TYPE, _rshares hivemind_app.hive_votes.rshares%TYPE) CASCADE
;
CREATE OR REPLACE FUNCTION public.calculate_notify_vote_score(_payout hive_posts.payout%TYPE, _abs_rshares hive_posts.abs_rshares%TYPE, _rshares hive_votes.rshares%TYPE)
CREATE OR REPLACE FUNCTION hivemind_app.calculate_notify_vote_score(_payout hivemind_app.hive_posts.payout%TYPE, _abs_rshares hivemind_app.hive_posts.abs_rshares%TYPE, _rshares hivemind_app.hive_votes.rshares%TYPE)
RETURNS INT
LANGUAGE 'sql'
IMMUTABLE
@ -39,9 +39,9 @@ AS $BODY$
END;
$BODY$;
DROP FUNCTION IF EXISTS notification_id CASCADE;
DROP FUNCTION IF EXISTS hivemind_app.notification_id CASCADE;
;
CREATE OR REPLACE FUNCTION notification_id(in _block_number INTEGER, in _notifyType INTEGER, in _id INTEGER)
CREATE OR REPLACE FUNCTION hivemind_app.notification_id(in _block_number INTEGER, in _notifyType INTEGER, in _id INTEGER)
RETURNS BIGINT
AS
$function$
@ -54,11 +54,11 @@ $function$
LANGUAGE plpgsql IMMUTABLE
;
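The body of notification_id is elided by this hunk; judging from its call sites further below, it presumably derives a stable BIGINT id from the block number, the notification type, and a per-type row id. A hedged call sketch (arguments illustrative; 17 is the vote notification type used later in this file):

SELECT hivemind_app.notification_id(64000000, 17, 12345);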
DROP FUNCTION IF EXISTS public.calculate_value_of_vote_on_post CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_value_of_vote_on_post(
_post_payout hive_posts.payout%TYPE
, _post_rshares hive_posts.vote_rshares%TYPE
, _vote_rshares hive_votes.rshares%TYPE)
DROP FUNCTION IF EXISTS hivemind_app.calculate_value_of_vote_on_post CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.calculate_value_of_vote_on_post(
_post_payout hivemind_app.hive_posts.payout%TYPE
, _post_rshares hivemind_app.hive_posts.vote_rshares%TYPE
, _vote_rshares hivemind_app.hive_votes.rshares%TYPE)
RETURNS FLOAT
LANGUAGE 'sql'
IMMUTABLE
@ -71,10 +71,10 @@ AS $BODY$
$BODY$;
-- View: public.hive_raw_notifications_as_view
-- View: hivemind_app.hive_raw_notifications_as_view
DROP VIEW IF EXISTS public.hive_raw_notifications_as_view CASCADE;
CREATE OR REPLACE VIEW public.hive_raw_notifications_as_view
DROP VIEW IF EXISTS hivemind_app.hive_raw_notifications_as_view CASCADE;
CREATE OR REPLACE VIEW hivemind_app.hive_raw_notifications_as_view
AS
SELECT notifs.block_num,
notifs.id,
@ -89,7 +89,7 @@ CREATE OR REPLACE VIEW public.hive_raw_notifications_as_view
notifs.payload,
harv.score
FROM ( SELECT hpv.block_num,
notification_id(hpv.block_num,
hivemind_app.notification_id(hpv.block_num,
CASE hpv.depth
WHEN 1 THEN 12
ELSE 13
@ -106,30 +106,29 @@ CREATE OR REPLACE VIEW public.hive_raw_notifications_as_view
''::character varying(16) AS community,
''::character varying AS community_title,
''::character varying AS payload
FROM hive_posts_pp_view hpv
FROM hivemind_app.hive_posts_pp_view hpv
WHERE hpv.depth > 0 AND
NOT EXISTS (SELECT NULL::text
FROM hive_follows hf
FROM hivemind_app.hive_follows hf
WHERE hf.follower = hpv.parent_author_id AND hf.following = hpv.author_id AND hf.state = 2)
UNION ALL
SELECT hf.block_num,
notification_id(hf.block_num, 15, hf.id) AS id,
hivemind_app.notification_id(hf.block_num, 15, hf.id) AS id,
0 AS post_id,
15 AS type_id,
hb.created_at,
(select hb.created_at from hive.hivemind_app_blocks_view hb where hb.num = (hf.block_num - 1)) as created_at, -- use time of previous block to match head_block_time behavior at given block
hf.follower AS src,
hf.following AS dst,
0 as dst_post_id,
''::character varying(16) AS community,
''::character varying AS community_title,
''::character varying AS payload
FROM hive_follows hf
JOIN hive_blocks hb ON hb.num = hf.block_num - 1 -- use time of previous block to match head_block_time behavior at given block
FROM hivemind_app.hive_follows hf
WHERE hf.state = 1 --only follow blog
UNION ALL
SELECT hr.block_num,
notification_id(hr.block_num, 14, hr.id) AS id,
hivemind_app.notification_id(hr.block_num, 14, hr.id) AS id,
hp.id AS post_id,
14 AS type_id,
hr.created_at,
@ -139,11 +138,11 @@ UNION ALL
''::character varying(16) AS community,
''::character varying AS community_title,
''::character varying AS payload
FROM hive_reblogs hr
JOIN hive_posts hp ON hr.post_id = hp.id
FROM hivemind_app.hive_reblogs hr
JOIN hivemind_app.hive_posts hp ON hr.post_id = hp.id
UNION ALL
SELECT hs.block_num,
notification_id(hs.block_num, 11, hs.id) AS id,
hivemind_app.notification_id(hs.block_num, 11, hs.id) AS id,
0 AS post_id,
11 AS type_id,
hs.created_at,
@ -153,29 +152,28 @@ UNION ALL
hc.name AS community,
hc.title AS community_title,
''::character varying AS payload
FROM hive_subscriptions hs
JOIN hive_communities hc ON hs.community_id = hc.id
FROM hivemind_app.hive_subscriptions hs
JOIN hivemind_app.hive_communities hc ON hs.community_id = hc.id
UNION ALL
SELECT hm.block_num,
notification_id(hm.block_num, 16, hm.id) AS id,
hivemind_app.notification_id(hm.block_num, 16, hm.id) AS id,
hm.post_id,
16 AS type_id,
hb.created_at,
(select hb.created_at from hive.hivemind_app_blocks_view hb where hb.num = (hm.block_num - 1)) as created_at, -- use time of previous block to match head_block_time behavior at given block
hp.author_id AS src,
hm.account_id AS dst,
hm.post_id as dst_post_id,
''::character varying(16) AS community,
''::character varying AS community_title,
''::character varying AS payload
FROM hive_mentions hm
JOIN hive_posts hp ON hm.post_id = hp.id
JOIN hive_blocks hb ON hb.num = hm.block_num - 1 -- use time of previous block to match head_block_time behavior at given block
FROM hivemind_app.hive_mentions hm
JOIN hivemind_app.hive_posts hp ON hm.post_id = hp.id
) notifs
JOIN hive_accounts_rank_view harv ON harv.id = notifs.src
JOIN hivemind_app.hive_accounts_rank_view harv ON harv.id = notifs.src
;
DROP VIEW IF EXISTS hive_raw_notifications_view_noas cascade;
CREATE OR REPLACE VIEW hive_raw_notifications_view_noas
DROP VIEW IF EXISTS hivemind_app.hive_raw_notifications_view_noas cascade;
CREATE OR REPLACE VIEW hivemind_app.hive_raw_notifications_view_noas
AS
SELECT -- votes
vn.block_num
@ -197,7 +195,7 @@ FROM
(
SELECT
hv1.block_num
, notification_id(hv1.block_num, 17, hv1.id::integer) AS id
, hivemind_app.notification_id(hv1.block_num, 17, hv1.id::integer) AS id
, hpv.id AS post_id
, 17 AS type_id
, hv1.last_update AS created_at
@ -206,9 +204,9 @@ FROM
, hpv.id AS dst_post_id
, ''::VARCHAR(16) AS community
, ''::VARCHAR AS community_title
, calculate_value_of_vote_on_post(hpv.payout + hpv.pending_payout, hpv.rshares, hv1.rshares) AS vote_value
, calculate_notify_vote_score(hpv.payout + hpv.pending_payout, hpv.abs_rshares, hv1.rshares) AS score
FROM hive_votes hv1
, hivemind_app.calculate_value_of_vote_on_post(hpv.payout + hpv.pending_payout, hpv.rshares, hv1.rshares) AS vote_value
, hivemind_app.calculate_notify_vote_score(hpv.payout + hpv.pending_payout, hpv.abs_rshares, hv1.rshares) AS score
FROM hivemind_app.hive_votes hv1
JOIN
(
SELECT
@ -218,8 +216,8 @@ FROM
, hpvi.pending_payout
, hpvi.abs_rshares
, hpvi.vote_rshares as rshares
FROM hive_posts hpvi
WHERE hpvi.block_num > block_before_head('97 days'::interval)
FROM hivemind_app.hive_posts hpvi
WHERE hpvi.block_num > hivemind_app.block_before_head('97 days'::interval)
) hpv ON hv1.post_id = hpv.id
WHERE hv1.rshares >= 10e9
) as vn
@ -227,7 +225,7 @@ FROM
UNION ALL
SELECT -- new community
hc.block_num as block_num
, notification_id(hc.block_num, 11, hc.id) as id
, hivemind_app.notification_id(hc.block_num, 11, hc.id) as id
, 0 as post_id
, 1 as type_id
, hc.created_at as created_at
@ -239,11 +237,11 @@ UNION ALL
, ''::VARCHAR as payload
, 35 as score
FROM
hive_communities hc
hivemind_app.hive_communities hc
UNION ALL
SELECT --persistent notifs
hn.block_num
, notification_id(hn.block_num, hn.type_id, CAST( hn.id as INT) ) as id
, hivemind_app.notification_id(hn.block_num, hn.type_id, CAST( hn.id as INT) ) as id
, hn.post_id as post_id
, hn.type_id as type_id
, hn.created_at as created_at
@ -254,18 +252,18 @@ UNION ALL
, hc.title as community_title
, hn.payload as payload
, hn.score as score
FROM hive_notifs hn
JOIN hive_communities hc ON hn.community_id = hc.id
FROM hivemind_app.hive_notifs hn
JOIN hivemind_app.hive_communities hc ON hn.community_id = hc.id
;
DROP VIEW IF EXISTS hive_raw_notifications_view CASCADE;
CREATE OR REPLACE VIEW hive_raw_notifications_view
DROP VIEW IF EXISTS hivemind_app.hive_raw_notifications_view CASCADE;
CREATE OR REPLACE VIEW hivemind_app.hive_raw_notifications_view
AS
SELECT *
FROM
(
SELECT * FROM hive_raw_notifications_as_view
SELECT * FROM hivemind_app.hive_raw_notifications_as_view
UNION ALL
SELECT * FROM hive_raw_notifications_view_noas
SELECT * FROM hivemind_app.hive_raw_notifications_view_noas
) as notifs
WHERE notifs.score >= 0 AND notifs.src IS DISTINCT FROM notifs.dst;
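A hedged sketch of how a consumer might read the combined view; the column names come from the SELECT lists above, and the 90-day window mirrors the block_before_head usage elsewhere in this changeset:

SELECT nv.id, nv.type_id, nv.src, nv.dst, nv.score
FROM hivemind_app.hive_raw_notifications_view nv
WHERE nv.block_num > hivemind_app.block_before_head('90 days'::interval)
ORDER BY nv.id DESC
LIMIT 100;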

View File

@ -1,14 +1,14 @@
DROP MATERIALIZED VIEW IF EXISTS payout_stats_view;
DROP MATERIALIZED VIEW IF EXISTS hivemind_app.payout_stats_view;
CREATE MATERIALIZED VIEW payout_stats_view AS
CREATE MATERIALIZED VIEW hivemind_app.payout_stats_view AS
SELECT
hp1.community_id,
ha.name AS author,
SUM( hp1.payout + hp1.pending_payout ) AS payout,
COUNT(*) AS posts,
NULL AS authors
FROM hive_posts hp1
JOIN hive_accounts ha ON ha.id = hp1.author_id
FROM hivemind_app.hive_posts hp1
JOIN hivemind_app.hive_accounts ha ON ha.id = hp1.author_id
WHERE hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.id != 0
GROUP BY community_id, author
@ -20,12 +20,12 @@ CREATE MATERIALIZED VIEW payout_stats_view AS
SUM( hp2.payout + hp2.pending_payout ) AS payout,
COUNT(*) AS posts,
COUNT(DISTINCT(author_id)) AS authors
FROM hive_posts hp2
FROM hivemind_app.hive_posts hp2
WHERE hp2.counter_deleted = 0 AND NOT hp2.is_paidout AND hp2.id != 0
GROUP BY community_id
WITH DATA
;
CREATE UNIQUE INDEX IF NOT EXISTS payout_stats_view_ix1 ON payout_stats_view (community_id, author );
CREATE INDEX IF NOT EXISTS payout_stats_view_ix2 ON payout_stats_view (community_id, author, payout);
CREATE UNIQUE INDEX IF NOT EXISTS payout_stats_view_ix1 ON hivemind_app.payout_stats_view (community_id, author );
CREATE INDEX IF NOT EXISTS payout_stats_view_ix2 ON hivemind_app.payout_stats_view (community_id, author, payout);
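Because payout_stats_view_ix1 is unique and not partial, it should in principle allow the materialized view to be refreshed without locking out readers (a sketch only; the actual refresh call lives elsewhere in the codebase):

REFRESH MATERIALIZED VIEW CONCURRENTLY hivemind_app.payout_stats_view;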

View File

@ -5,8 +5,8 @@ See http://pretius.com/postgresql-stop-worrying-about-table-and-view-dependencie
Enhanced by Wojciech Barcik wbarcik@syncad.com (handling of rules).
*/
DROP TABLE IF EXISTS deps_saved_ddl;
DROP SEQUENCE IF EXISTS deps_saved_ddl_deps_id_seq;
DROP TABLE IF EXISTS hivemind_app.deps_saved_ddl;
DROP SEQUENCE IF EXISTS hivemind_app.deps_saved_ddl_deps_id_seq;
-- SEQUENCE: deps_saved_ddl_deps_id_seq
@ -38,7 +38,7 @@ CREATE TABLE if not exists deps_saved_ddl
-- DROP FUNCTION deps_save_and_drop_dependencies(character varying, character varying, boolean);
CREATE OR REPLACE FUNCTION deps_save_and_drop_dependencies(
CREATE OR REPLACE FUNCTION hivemind_app.deps_save_and_drop_dependencies(
p_view_schema character varying,
p_view_name character varying,
drop_relation boolean DEFAULT true
@ -165,7 +165,7 @@ $BODY$;
-- DROP FUNCTION deps_restore_dependencies(character varying, character varying);
CREATE OR REPLACE FUNCTION deps_restore_dependencies(
CREATE OR REPLACE FUNCTION hivemind_app.deps_restore_dependencies(
p_view_schema character varying,
p_view_name character varying
)

View File

@ -1,9 +1,9 @@
DROP FUNCTION IF EXISTS update_communities_posts_data_and_rank;
CREATE FUNCTION update_communities_posts_data_and_rank()
DROP FUNCTION IF EXISTS hivemind_app.update_communities_posts_data_and_rank;
CREATE FUNCTION hivemind_app.update_communities_posts_data_and_rank()
RETURNS void
AS
$function$
UPDATE hive_communities hc SET
UPDATE hivemind_app.hive_communities hc SET
num_pending = cr.posts,
sum_pending = cr.payouts,
num_authors = cr.authors,
@ -16,13 +16,13 @@ FROM
COALESCE(p.posts, 0) as posts,
COALESCE(p.payouts, 0) as payouts,
COALESCE(p.authors, 0) as authors
FROM hive_communities c
FROM hivemind_app.hive_communities c
LEFT JOIN (
SELECT hp.community_id,
COUNT(*) posts,
ROUND(SUM(hp.pending_payout)) payouts,
COUNT(DISTINCT hp.author_id) authors
FROM hive_posts hp
FROM hivemind_app.hive_posts hp
WHERE community_id IS NOT NULL
AND NOT hp.is_paidout
AND hp.counter_deleted = 0

View File

@ -1,25 +1,25 @@
DROP FUNCTION IF EXISTS update_feed_cache(in _from_block_num INTEGER, in _to_block_num INTEGER);
CREATE OR REPLACE FUNCTION update_feed_cache(in _from_block_num INTEGER, in _to_block_num INTEGER)
DROP FUNCTION IF EXISTS hivemind_app.update_feed_cache(in _from_block_num INTEGER, in _to_block_num INTEGER);
CREATE OR REPLACE FUNCTION hivemind_app.update_feed_cache(in _from_block_num INTEGER, in _to_block_num INTEGER)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
BEGIN
INSERT INTO
hive_feed_cache (account_id, post_id, created_at, block_num)
hivemind_app.hive_feed_cache (account_id, post_id, created_at, block_num)
SELECT
hp.author_id, hp.id, hp.created_at, hp.block_num
FROM
hive_posts hp
hivemind_app.hive_posts hp
WHERE hp.depth = 0 AND hp.counter_deleted = 0 AND ((_from_block_num IS NULL AND _to_block_num IS NULL) OR (hp.block_num BETWEEN _from_block_num AND _to_block_num))
ON CONFLICT DO NOTHING;
INSERT INTO
hive_feed_cache (account_id, post_id, created_at, block_num)
hivemind_app.hive_feed_cache (account_id, post_id, created_at, block_num)
SELECT
hr.blogger_id, hr.post_id, hr.created_at, hr.block_num
FROM
hive_reblogs hr
hivemind_app.hive_reblogs hr
WHERE (_from_block_num IS NULL AND _to_block_num IS NULL) OR (hr.block_num BETWEEN _from_block_num AND _to_block_num)
ON CONFLICT DO NOTHING;
END
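A usage sketch based on the WHERE clauses above (block numbers illustrative):

-- Full rebuild: both bounds NULL selects every qualifying row.
SELECT hivemind_app.update_feed_cache(NULL, NULL);
-- Incremental update for one synced range:
SELECT hivemind_app.update_feed_cache(64000000, 64001000);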

View File

@ -1,28 +1,28 @@
DROP FUNCTION IF EXISTS update_follow_count(hive_blocks.num%TYPE, hive_blocks.num%TYPE);
CREATE OR REPLACE FUNCTION update_follow_count(
in _first_block hive_blocks.num%TYPE,
in _last_block hive_blocks.num%TYPE
DROP FUNCTION IF EXISTS hivemind_app.update_follow_count(hive.hivemind_app_blocks_view.num%TYPE, hive.hivemind_app_blocks_view.num%TYPE);
CREATE OR REPLACE FUNCTION hivemind_app.update_follow_count(
in _first_block hive.hivemind_app_blocks_view.num%TYPE,
in _last_block hive.hivemind_app_blocks_view.num%TYPE
)
RETURNS VOID
LANGUAGE 'plpgsql'
AS
$BODY$
BEGIN
UPDATE hive_accounts ha
UPDATE hivemind_app.hive_accounts ha
SET
followers = data_set.followers_count,
following = data_set.following_count
FROM
(
WITH data_cfe(user_id) AS (
SELECT DISTINCT following FROM hive_follows WHERE block_num BETWEEN _first_block AND _last_block
SELECT DISTINCT following FROM hivemind_app.hive_follows WHERE block_num BETWEEN _first_block AND _last_block
UNION
SELECT DISTINCT follower FROM hive_follows WHERE block_num BETWEEN _first_block AND _last_block
SELECT DISTINCT follower FROM hivemind_app.hive_follows WHERE block_num BETWEEN _first_block AND _last_block
)
SELECT
data_cfe.user_id AS user_id,
(SELECT COUNT(1) FROM hive_follows hf1 WHERE hf1.following = data_cfe.user_id AND hf1.state = 1) AS followers_count,
(SELECT COUNT(1) FROM hive_follows hf2 WHERE hf2.follower = data_cfe.user_id AND hf2.state = 1) AS following_count
(SELECT COUNT(1) FROM hivemind_app.hive_follows hf1 WHERE hf1.following = data_cfe.user_id AND hf1.state = 1) AS followers_count,
(SELECT COUNT(1) FROM hivemind_app.hive_follows hf2 WHERE hf2.follower = data_cfe.user_id AND hf2.state = 1) AS following_count
FROM
data_cfe
) AS data_set(user_id, followers_count, following_count)
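The hunk ends before the closing WHERE clause, but the signature above is enough for a hedged call sketch (block numbers illustrative):

SELECT hivemind_app.update_follow_count(64000000, 64001000);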

View File

@ -1,22 +1,30 @@
DROP FUNCTION IF EXISTS update_hive_blocks_consistency_flag;
CREATE OR REPLACE FUNCTION update_hive_blocks_consistency_flag(
in _first_block_num INTEGER,
in _last_block_num INTEGER)
RETURNS VOID
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
DROP FUNCTION IF EXISTS hivemind_app.update_last_imported_block;
CREATE OR REPLACE FUNCTION hivemind_app.update_last_imported_block(
in _block_number INTEGER,
in _block_date TIMESTAMP)
RETURNS VOID
LANGUAGE 'plpgsql'
VOLATILE
AS
$BODY$
BEGIN
IF _first_block_num IS NULL OR _last_block_num IS NULL THEN
RAISE EXCEPTION 'First/last block number is required' USING ERRCODE = 'CEHMA';
END IF;
UPDATE hive_blocks
SET completed = True
WHERE num BETWEEN _first_block_num AND _last_block_num;
UPDATE hivemind_app.hive_state
SET last_imported_block_num = _block_number, last_imported_block_date = _block_date;
END
$BODY$
;
DROP FUNCTION IF EXISTS hivemind_app.update_last_completed_block;
CREATE OR REPLACE FUNCTION hivemind_app.update_last_completed_block(
in _block_number INTEGER)
RETURNS VOID
LANGUAGE 'plpgsql'
VOLATILE
AS
$BODY$
BEGIN
UPDATE hivemind_app.hive_state
SET last_completed_block_num = _block_number;
END
$BODY$
;
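Taken together, the two functions split sync bookkeeping into two phases. A hedged sketch of the intended call order (values illustrative):

-- 1. record the imported block as soon as its rows are written:
SELECT hivemind_app.update_last_imported_block(64000000, '2022-12-20 07:26:30'::timestamp);
-- 2. ... run the per-range update functions (rshares, children counts, mentions, ...) ...
-- 3. only then mark the block as completed:
SELECT hivemind_app.update_last_completed_block(64000000);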

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS public.update_hive_posts_root_id(INTEGER, INTEGER);
DROP FUNCTION IF EXISTS hivemind_app.update_hive_posts_root_id(INTEGER, INTEGER);
CREATE OR REPLACE FUNCTION public.update_hive_posts_root_id(in _first_block_num INTEGER, _last_block_num INTEGER)
CREATE OR REPLACE FUNCTION hivemind_app.update_hive_posts_root_id(in _first_block_num INTEGER, _last_block_num INTEGER)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
@ -8,7 +8,7 @@ AS $BODY$
BEGIN
--- _first_block_num can be null together with _last_block_num
UPDATE hive_posts uhp
UPDATE hivemind_app.hive_posts uhp
SET root_id = id
WHERE uhp.root_id = 0 AND (_first_block_num IS NULL OR (uhp.block_num >= _first_block_num AND uhp.block_num <= _last_block_num))
;
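Per the comment above, both bounds may be NULL together; a call sketch:

-- initial fill: touch every post whose root_id is still 0
SELECT hivemind_app.update_hive_posts_root_id(NULL, NULL);
-- incremental: restrict to a block range (numbers illustrative)
SELECT hivemind_app.update_hive_posts_root_id(64000000, 64001000);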

View File

@ -1,6 +1,6 @@
DROP FUNCTION IF EXISTS public.update_hive_posts_api_helper(INTEGER, INTEGER);
DROP FUNCTION IF EXISTS hivemind_app.update_hive_posts_api_helper(INTEGER, INTEGER);
CREATE OR REPLACE FUNCTION public.update_hive_posts_api_helper(in _first_block_num INTEGER, _last_block_num INTEGER)
CREATE OR REPLACE FUNCTION hivemind_app.update_hive_posts_api_helper(in _first_block_num INTEGER, _last_block_num INTEGER)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
@ -8,20 +8,20 @@ AS $BODY$
BEGIN
IF _first_block_num IS NULL OR _last_block_num IS NULL THEN
-- initial creation of table.
INSERT INTO hive_posts_api_helper
INSERT INTO hivemind_app.hive_posts_api_helper
(id, author_s_permlink)
SELECT hp.id, hp.author || '/' || hp.permlink
FROM live_posts_comments_view hp
JOIN hive_accounts ha ON (ha.id = hp.author_id)
JOIN hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_accounts ha ON (ha.id = hp.author_id)
JOIN hivemind_app.hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
;
ELSE
-- Regular incremental update.
INSERT INTO hive_posts_api_helper (id, author_s_permlink)
INSERT INTO hivemind_app.hive_posts_api_helper (id, author_s_permlink)
SELECT hp.id, ha.name || '/' || hpd_p.permlink
FROM live_posts_comments_view hp
JOIN hive_accounts ha ON (ha.id = hp.author_id)
JOIN hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_accounts ha ON (ha.id = hp.author_id)
JOIN hivemind_app.hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
WHERE hp.block_num BETWEEN _first_block_num AND _last_block_num
ON CONFLICT (id) DO NOTHING
;
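Mirroring the two branches above (NULL bounds mean initial creation of the table), a call sketch:

SELECT hivemind_app.update_hive_posts_api_helper(NULL, NULL);          -- initial fill
SELECT hivemind_app.update_hive_posts_api_helper(64000000, 64001000);  -- incremental (illustrative range)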

View File

@ -1,11 +1,11 @@
DROP FUNCTION IF EXISTS public.update_hive_posts_children_count;
CREATE OR REPLACE FUNCTION public.update_hive_posts_children_count(in _first_block INTEGER, in _last_block INTEGER)
DROP FUNCTION IF EXISTS hivemind_app.update_hive_posts_children_count;
CREATE OR REPLACE FUNCTION hivemind_app.update_hive_posts_children_count(in _first_block INTEGER, in _last_block INTEGER)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
BEGIN
UPDATE hive_posts uhp
UPDATE hivemind_app.hive_posts uhp
SET children = data_source.delta + uhp.children
FROM
(
@ -35,7 +35,7 @@ WITH recursive tblChild AS
ELSE 0
END
) as delta_deleted
FROM hive_posts h1
FROM hivemind_app.hive_posts h1
WHERE h1.block_num BETWEEN _first_block AND _last_block OR h1.block_num_created BETWEEN _first_block AND _last_block
ORDER BY h1.depth DESC
) s
@ -45,7 +45,7 @@ WITH recursive tblChild AS
, p.id as id
, p.depth as depth
, tblChild.delta as delta
FROM hive_posts p
FROM hivemind_app.hive_posts p
JOIN tblChild ON p.id = tblChild.queried_parent
WHERE p.depth < tblChild.depth
)
@ -61,53 +61,53 @@ WHERE uhp.id = data_source.queried_parent
END
$BODY$;
DROP FUNCTION IF EXISTS public.update_all_hive_posts_children_count;
CREATE OR REPLACE FUNCTION public.update_all_hive_posts_children_count()
DROP FUNCTION IF EXISTS hivemind_app.update_all_hive_posts_children_count;
CREATE OR REPLACE FUNCTION hivemind_app.update_all_hive_posts_children_count()
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
declare __depth INT;
BEGIN
SELECT MAX(hp.depth) into __depth FROM hive_posts hp ;
SELECT MAX(hp.depth) into __depth FROM hivemind_app.hive_posts hp ;
CREATE UNLOGGED TABLE IF NOT EXISTS __post_children
CREATE UNLOGGED TABLE IF NOT EXISTS hivemind_app.__post_children
(
id INT NOT NULL,
child_count INT NOT NULL,
CONSTRAINT __post_children_pkey PRIMARY KEY (id)
);
TRUNCATE TABLE __post_children;
TRUNCATE TABLE hivemind_app.__post_children;
WHILE __depth >= 0 LOOP
INSERT INTO __post_children
INSERT INTO hivemind_app.__post_children
(id, child_count)
SELECT
h1.parent_id AS queried_parent,
SUM(COALESCE((SELECT pc.child_count FROM __post_children pc WHERE pc.id = h1.id),
SUM(COALESCE((SELECT pc.child_count FROM hivemind_app.__post_children pc WHERE pc.id = h1.id),
0
) + 1
) AS count
FROM hive_posts h1
FROM hivemind_app.hive_posts h1
WHERE (h1.parent_id != 0 OR __depth = 0) AND h1.counter_deleted = 0 AND h1.id != 0 AND h1.depth = __depth
GROUP BY h1.parent_id
ON CONFLICT ON CONSTRAINT __post_children_pkey DO UPDATE
SET child_count = __post_children.child_count + excluded.child_count
SET child_count = hivemind_app.__post_children.child_count + excluded.child_count
;
__depth := __depth -1;
END LOOP;
UPDATE hive_posts uhp
UPDATE hivemind_app.hive_posts uhp
SET children = s.child_count
FROM
__post_children s
hivemind_app.__post_children s
WHERE s.id = uhp.id and s.child_count != uhp.children
;
TRUNCATE TABLE __post_children;
TRUNCATE TABLE hivemind_app.__post_children;
END
$BODY$;
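Call sketches for the two variants; the full recount is what the removed migration code further below invoked, while the range variant matches the other incremental updaters (range illustrative):

SELECT hivemind_app.update_hive_posts_children_count(64000000, 64001000);
SELECT hivemind_app.update_all_hive_posts_children_count();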

View File

@ -1,5 +1,5 @@
DROP FUNCTION IF EXISTS update_hive_posts_mentions(INTEGER, INTEGER);
CREATE OR REPLACE FUNCTION update_hive_posts_mentions(in _first_block INTEGER, in _last_block INTEGER)
DROP FUNCTION IF EXISTS hivemind_app.update_hive_posts_mentions(INTEGER, INTEGER);
CREATE OR REPLACE FUNCTION hivemind_app.update_hive_posts_mentions(in _first_block INTEGER, in _last_block INTEGER)
RETURNS VOID
LANGUAGE 'plpgsql'
AS
@ -12,10 +12,10 @@ BEGIN
_first_block = _last_block - __block_limit;
END IF;
INSERT INTO hive_mentions( post_id, account_id, block_num )
INSERT INTO hivemind_app.hive_mentions( post_id, account_id, block_num )
SELECT DISTINCT T.id_post, ha.id, T.block_num
FROM
hive_accounts ha
hivemind_app.hive_accounts ha
INNER JOIN
(
SELECT T.id_post, LOWER( ( SELECT trim( T.mention::text, '{""}') ) ) AS mention, T.author_id, T.block_num
@ -23,8 +23,8 @@ BEGIN
(
SELECT
hp.id, REGEXP_MATCHES( hpd.body, '(?:^|[^a-zA-Z0-9_!#$%&*@\\/])(?:@)([a-zA-Z0-9\\.-]{1,16}[a-zA-Z0-9])(?![a-z])', 'g') AS mention, hp.author_id, hp.block_num
FROM hive_posts hp
INNER JOIN hive_post_data hpd ON hp.id = hpd.id
FROM hivemind_app.hive_posts hp
INNER JOIN hivemind_app.hive_post_data hpd ON hp.id = hpd.id
WHERE hp.block_num >= _first_block
)T( id_post, mention, author_id, block_num )
)T( id_post, mention, author_id, block_num ) ON ha.name = T.mention
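A call sketch; per the body above, the function clamps _first_block internally when the requested range exceeds __block_limit (numbers illustrative):

SELECT hivemind_app.update_hive_posts_mentions(64000000, 64001000);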

View File

@ -1,7 +1,7 @@
DROP FUNCTION IF EXISTS update_posts_rshares;
CREATE OR REPLACE FUNCTION update_posts_rshares(
_first_block hive_blocks.num%TYPE
, _last_block hive_blocks.num%TYPE
DROP FUNCTION IF EXISTS hivemind_app.update_posts_rshares;
CREATE OR REPLACE FUNCTION hivemind_app.update_posts_rshares(
_first_block hive.hivemind_app_blocks_view.num%TYPE
, _last_block hive.hivemind_app_blocks_view.num%TYPE
)
RETURNS VOID
LANGUAGE 'plpgsql'
@ -12,12 +12,12 @@ BEGIN
SET LOCAL work_mem='2GB';
IF (_last_block - _first_block) > 10000 THEN
UPDATE hive_posts hp
UPDATE hivemind_app.hive_posts hp
SET
abs_rshares = votes_rshares.abs_rshares
,vote_rshares = votes_rshares.rshares
,sc_hot = CASE hp.is_paidout WHEN True Then 0 ELSE calculate_hot( votes_rshares.rshares, hp.created_at) END
,sc_trend = CASE hp.is_paidout WHEN True Then 0 ELSE calculate_trending( votes_rshares.rshares, hp.created_at) END
,sc_hot = CASE hp.is_paidout WHEN True Then 0 ELSE hivemind_app.calculate_hot( votes_rshares.rshares, hp.created_at) END
,sc_trend = CASE hp.is_paidout WHEN True Then 0 ELSE hivemind_app.calculate_trending( votes_rshares.rshares, hp.created_at) END
,total_votes = votes_rshares.total_votes
,net_votes = votes_rshares.net_votes
FROM
@ -32,7 +32,7 @@ IF (_last_block - _first_block) > 10000 THEN
WHEN hv.rshares = 0 THEN 0
ELSE -1
END ) as net_votes
FROM hive_votes hv
FROM hivemind_app.hive_votes hv
GROUP BY hv.post_id
) as votes_rshares
WHERE hp.id = votes_rshares.post_id
@ -44,12 +44,12 @@ IF (_last_block - _first_block) > 10000 THEN
OR hp.net_votes != votes_rshares.net_votes
);
ELSE
UPDATE hive_posts hp
UPDATE hivemind_app.hive_posts hp
SET
abs_rshares = votes_rshares.abs_rshares
,vote_rshares = votes_rshares.rshares
,sc_hot = CASE hp.is_paidout WHEN True Then 0 ELSE calculate_hot( votes_rshares.rshares, hp.created_at) END
,sc_trend = CASE hp.is_paidout WHEN True Then 0 ELSE calculate_trending( votes_rshares.rshares, hp.created_at) END
,sc_hot = CASE hp.is_paidout WHEN True Then 0 ELSE hivemind_app.calculate_hot( votes_rshares.rshares, hp.created_at) END
,sc_trend = CASE hp.is_paidout WHEN True Then 0 ELSE hivemind_app.calculate_trending( votes_rshares.rshares, hp.created_at) END
,total_votes = votes_rshares.total_votes
,net_votes = votes_rshares.net_votes
FROM
@ -64,11 +64,11 @@ ELSE
WHEN hv.rshares = 0 THEN 0
ELSE -1
END ) as net_votes
FROM hive_votes hv
FROM hivemind_app.hive_votes hv
WHERE EXISTS
(
SELECT NULL
FROM hive_votes hv2
FROM hivemind_app.hive_votes hv2
WHERE hv2.post_id = hv.post_id AND hv2.block_num BETWEEN _first_block AND _last_block
)
GROUP BY hv.post_id
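A call sketch; per the branch above, ranges wider than 10000 blocks take the full-table path, narrower ranges the EXISTS-filtered one (numbers illustrative):

SELECT hivemind_app.update_posts_rshares(64000000, 64001000);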

View File

@ -1,2 +1,2 @@
ALTER TABLE hive_posts ALTER COLUMN tags_ids SET STATISTICS 1000;
ALTER TABLE hivemind_app.hive_posts ALTER COLUMN tags_ids SET STATISTICS 1000;

View File

@ -1,4 +1,4 @@
SELECT COALESCE((SELECT hd.vacuum_needed FROM hive_db_vacuum_needed hd WHERE hd.vacuum_needed LIMIT 1), False) AS needs_vacuum
SELECT COALESCE((SELECT hd.vacuum_needed FROM hivemind_app.hive_db_vacuum_needed hd WHERE hd.vacuum_needed LIMIT 1), False) AS needs_vacuum
\gset
\if :needs_vacuum
\qecho Running VACUUM on the database

View File

@ -1,6 +1,6 @@
START TRANSACTION;
insert into hive_db_patch_level
insert into hivemind_app.hive_db_patch_level
(level, patch_date, patched_to_revision)
select ds.level, ds.patch_date, ds.patch_revision
from
@ -10,6 +10,6 @@ values
,(34, now(), '9d2cc15bea71a39139abdf49569e0eac6dd0b970') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/575
) ds (level, patch_date, patch_revision)
where not exists (select null from hive_db_patch_level hpl where hpl.patched_to_revision = ds.patch_revision);
where not exists (select null from hivemind_app.hive_db_patch_level hpl where hpl.patched_to_revision = ds.patch_revision);
COMMIT;

View File

@ -1,179 +1,4 @@
--- Put runtime data migration code here
START TRANSACTION;
DO
$BODY$
BEGIN
SET work_mem='2GB';
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'Reputation calculation') THEN
RAISE NOTICE 'Performing initial account reputation calculation...';
PERFORM update_account_reputations(NULL, NULL, True);
ELSE
RAISE NOTICE 'Skipping initial account reputation calculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'hive_posts_api_helper fill') THEN
RAISE NOTICE 'Performing initial hive_posts_api_helper collection...';
SET work_mem='2GB';
TRUNCATE TABLE hive_posts_api_helper;
DROP INDEX IF EXISTS hive_posts_api_helper_author_permlink_idx;
DROP INDEX IF EXISTS hive_posts_api_helper_author_s_permlink_idx;
PERFORM update_hive_posts_api_helper(NULL, NULL);
CREATE INDEX IF NOT EXISTS hive_posts_api_helper_author_s_permlink_idx ON hive_posts_api_helper (author_s_permlink);
ELSE
RAISE NOTICE 'Skipping initial hive_posts_api_helper collection...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'hive_mentions fill') THEN
RAISE NOTICE 'Performing initial post body mentions collection...';
SET work_mem='2GB';
DROP INDEX IF EXISTS hive_mentions_block_num_idx;
PERFORM update_hive_posts_mentions(0, (SELECT hb.num FROM hive_blocks hb ORDER BY hb.num DESC LIMIT 1) );
CREATE INDEX IF NOT EXISTS hive_mentions_block_num_idx ON hive_mentions (block_num);
ELSE
RAISE NOTICE 'Skipping initial post body mentions collection...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_posts_rshares( 0, head_block_number) execution') THEN
RAISE NOTICE 'Performing posts rshares, hot and trend recalculation on range ( 0, head_block_number)...';
SET work_mem='2GB';
PERFORM update_posts_rshares(0, (SELECT hb.num FROM hive_blocks hb ORDER BY hb.num DESC LIMIT 1) );
DELETE FROM hive_db_data_migration WHERE migration = 'update_posts_rshares( 0, head_block_number) execution';
ELSE
RAISE NOTICE 'Skipping update_posts_rshares( 0, head_block_number) recalculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_hive_posts_children_count execution') THEN
RAISE NOTICE 'Performing initial post children count execution ( 0, head_block_number)...';
SET work_mem='2GB';
update hive_posts set children = 0 where children != 0;
PERFORM update_all_hive_posts_children_count();
DELETE FROM hive_db_data_migration WHERE migration = 'update_hive_posts_children_count execution';
ELSE
RAISE NOTICE 'Skipping initial post children count execution ( 0, head_block_number) recalculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_hive_post_mentions refill execution') THEN
RAISE NOTICE 'Performing hive_mentions refill...';
SET work_mem='2GB';
TRUNCATE TABLE hive_mentions RESTART IDENTITY;
PERFORM update_hive_posts_mentions(0, (select max(num) from hive_blocks));
DELETE FROM hive_db_data_migration WHERE migration = 'update_hive_post_mentions refill execution';
ELSE
RAISE NOTICE 'Skipping hive_mentions refill...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
-- Also covers previous changes at a80c7642a1f3b08997af7e8a9915c13d34b7f0e0
-- Also covers previous changes at b100db27f37dda3c869c2756d99ab2856f7da9f9
-- Also covers previous changes at bd83414409b7624e2413b97a62fa7d97d83edd86
IF NOT EXISTS (SELECT * FROM hive_db_patch_level WHERE patched_to_revision = '1cc9981679157e4e54e5e4a74cca1feb5d49296d')
THEN
RAISE NOTICE 'Performing notification cache initial fill...';
SET work_mem='2GB';
PERFORM update_notification_cache(NULL, NULL, False);
DELETE FROM hive_db_data_migration WHERE migration = 'Notification cache initial fill';
ELSE
RAISE NOTICE 'Skipping notification cache initial fill...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
SET work_mem='2GB';
IF NOT EXISTS(SELECT * FROM hive_db_patch_level WHERE patched_to_revision = 'cce7fe54a2242b7a80354ee7e50e5b3275a2b039') THEN
RAISE NOTICE 'Performing reputation livesync recalculation...';
--- reputations have to be recalculated from scratch.
UPDATE hive_accounts SET reputation = 0, is_implicit = True;
PERFORM update_account_reputations(NULL, NULL, True);
INSERT INTO hive_db_vacuum_needed
(vacuum_needed)
values
(True)
;
ELSE
RAISE NOTICE 'Skipping reputation livesync recalculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
SET work_mem='2GB';
IF NOT EXISTS(SELECT * FROM hive_db_patch_level WHERE patched_to_revision = '33dd5e52673335284c6aa28ee89a069f83bd2dc6') THEN
RAISE NOTICE 'Performing reputation data cleanup...';
PERFORM truncate_account_reputation_data('30 days'::interval);
INSERT INTO hive_db_vacuum_needed
(vacuum_needed)
values
(True)
;
ELSE
RAISE NOTICE 'Skipping reputation data cleanup...';
END IF;
END
$BODY$;
COMMIT;
TRUNCATE TABLE hive_db_data_migration;
--- Must be at the end
TRUNCATE TABLE hivemind_app.hive_db_data_migration;

View File

@ -1,45 +1,31 @@
do $$
BEGIN
ASSERT EXISTS (SELECT * FROM pg_extension WHERE extname='intarray'), 'The database requires created "intarray" extension';
ASSERT EXISTS (SELECT * FROM pg_extension WHERE extname='intarray'), 'The database requires created "intarray" extension';
ASSERT (SELECT setting FROM pg_settings where name='join_collapse_limit' and source='database')::int = 16, 'Bad optimizer settings, use setup_db.sh script to setup target database correctly';
ASSERT (SELECT setting FROM pg_settings where name='from_collapse_limit' and source='database')::int = 16, 'Bad optimizer settings, use setup_db.sh script to setup target database correctly';
ASSERT (SELECT setting FROM pg_settings where name='jit' and source='database')::BOOLEAN = False, 'Bad optimizer settings, use setup_db.sh script to setup target database correctly';
END$$;
CREATE TABLE IF NOT EXISTS hive_db_patch_level
CREATE TABLE IF NOT EXISTS hivemind_app.hive_db_patch_level
(
level SERIAL NOT NULL PRIMARY KEY,
patch_date timestamp without time zone NOT NULL,
patched_to_revision TEXT
);
CREATE TABLE IF NOT EXISTS hive_db_data_migration
CREATE TABLE IF NOT EXISTS hivemind_app.hive_db_data_migration
(
migration varchar(128) not null
);
CREATE TABLE IF NOT EXISTS hive_db_vacuum_needed
CREATE TABLE IF NOT EXISTS hivemind_app.hive_db_vacuum_needed
(
vacuum_needed BOOLEAN NOT NULL
);
TRUNCATE TABLE hive_db_vacuum_needed;
TRUNCATE TABLE hivemind_app.hive_db_vacuum_needed;
DO $$
DECLARE
__version INT;
BEGIN
SELECT CURRENT_SETTING('server_version_num')::INT INTO __version;
EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16';
EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16';
IF __version >= 120000 THEN
RAISE NOTICE 'Disabling a JIT optimization on the current database level...';
EXECUTE 'ALTER DATABASE '||current_database()||' SET jit TO False';
END IF;
END
$$;
SHOW join_collapse_limit;
SHOW from_collapse_limit;
--- Put schema upgrade code here.
--- ####################################### 1.26 release upgrades #######################################
@ -47,10 +33,10 @@ SHOW from_collapse_limit;
--- Changes done in index hive_posts_community_id_id_idx overwritten by MR 575 (see below)
DROP INDEX IF EXISTS hive_posts_community_id_is_pinned_idx;
DROP INDEX IF EXISTS hivemind_app.hive_posts_community_id_is_pinned_idx;
CREATE INDEX IF NOT EXISTS hive_posts_community_id_is_pinned_idx
ON public.hive_posts USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_posts_community_id_is_pinned_idx
ON hivemind_app.hive_posts USING btree
(community_id ASC NULLS LAST)
INCLUDE(id)
WHERE is_pinned AND counter_deleted = 0;
@ -59,63 +45,63 @@ CREATE INDEX IF NOT EXISTS hive_posts_community_id_is_pinned_idx
--- Begin of MR https://gitlab.syncad.com/hive/hivemind/-/merge_requests/575 ---
DROP INDEX IF EXISTS hive_posts_community_id_id_idx;
DROP INDEX IF EXISTS hivemind_app.hive_posts_community_id_id_idx;
CREATE INDEX IF NOT EXISTS hive_posts_community_id_id_idx
ON public.hive_posts USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_posts_community_id_id_idx
ON hivemind_app.hive_posts USING btree
(community_id ASC NULLS LAST, id DESC)
WHERE counter_deleted = 0
;
--- dedicated to bridge_get_ranked_post_by_created_for_community
CREATE INDEX IF NOT EXISTS hive_posts_community_id_not_is_pinned_idx
ON public.hive_posts USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_posts_community_id_not_is_pinned_idx
ON hivemind_app.hive_posts USING btree
(community_id, id DESC)
WHERE NOT is_pinned and depth = 0 and counter_deleted = 0
;
--- Specific to bridge_get_ranked_post_by_trends_for_community
CREATE INDEX IF NOT EXISTS hive_posts_community_id_not_is_paidout_idx
ON public.hive_posts USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_posts_community_id_not_is_paidout_idx
ON hivemind_app.hive_posts USING btree
(community_id)
INCLUDE (id)
WHERE NOT is_paidout AND depth = 0 AND counter_deleted = 0
;
DROP INDEX IF EXISTS hive_posts_author_id_id_idx;
DROP INDEX IF EXISTS hivemind_app.hive_posts_author_id_id_idx;
CREATE INDEX IF NOT EXISTS hive_posts_author_id_id_idx
ON public.hive_posts USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_posts_author_id_id_idx
ON hivemind_app.hive_posts USING btree
(author_id, id DESC)
WHERE counter_deleted = 0
;
DROP INDEX IF EXISTS hive_follows_following_state_idx;
DROP INDEX IF EXISTS hivemind_app.hive_follows_following_state_idx;
CREATE INDEX IF NOT EXISTS hive_follows_following_state_idx
ON public.hive_follows USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_follows_following_state_idx
ON hivemind_app.hive_follows USING btree
(following, state)
;
DROP INDEX IF EXISTS hive_follows_follower_state_idx;
DROP INDEX IF EXISTS hivemind_app.hive_follows_follower_state_idx;
CREATE INDEX IF NOT EXISTS hive_follows_follower_state_idx
ON public.hive_follows USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_follows_follower_state_idx
ON hivemind_app.hive_follows USING btree
(follower, state)
;
DROP INDEX IF EXISTS hive_follows_follower_following_state_idx;
DROP INDEX IF EXISTS hivemind_app.hive_follows_follower_following_state_idx;
CREATE INDEX IF NOT EXISTS hive_follows_follower_following_state_idx
ON public.hive_follows USING btree
CREATE INDEX IF NOT EXISTS hivemind_app.hive_follows_follower_following_state_idx
ON hivemind_app.hive_follows USING btree
(follower, following, state)
;
DROP INDEX IF EXISTS hive_feed_cache_account_id_created_at_post_id_idx;
DROP INDEX IF EXISTS hivemind_app.hive_feed_cache_account_id_created_at_post_id_idx;
--- Dedicated index to bridge_get_account_posts_by_blog
CREATE INDEX IF NOT EXISTS hive_feed_cache_account_id_created_at_post_id_idx
ON public.hive_feed_cache
CREATE INDEX IF NOT EXISTS hivemind_app.hive_feed_cache_account_id_created_at_post_id_idx
ON hivemind_app.hive_feed_cache
(account_id, created_at DESC, post_id DESC)
;

View File

@ -1,5 +1,5 @@
DROP FUNCTION IF EXISTS public.max_time_stamp() CASCADE;
CREATE OR REPLACE FUNCTION public.max_time_stamp( _first TIMESTAMP, _second TIMESTAMP )
DROP FUNCTION IF EXISTS hivemind_app.max_time_stamp() CASCADE;
CREATE OR REPLACE FUNCTION hivemind_app.max_time_stamp( _first TIMESTAMP, _second TIMESTAMP )
RETURNS TIMESTAMP
LANGUAGE 'plpgsql'
IMMUTABLE
@ -13,11 +13,11 @@ BEGIN
END
$BODY$;
DROP FUNCTION IF EXISTS find_comment_id(character varying, character varying, boolean)
DROP FUNCTION IF EXISTS hivemind_app.find_comment_id(character varying, character varying, boolean)
;
CREATE OR REPLACE FUNCTION find_comment_id(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_comment_id(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _check boolean)
RETURNS INT
LANGUAGE 'plpgsql'
@ -29,17 +29,17 @@ BEGIN
IF (_author <> '' OR _permlink <> '') THEN
SELECT INTO __post_id COALESCE( (
SELECT hp.id
FROM hive_posts hp
JOIN hive_accounts ha ON ha.id = hp.author_id
JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
FROM hivemind_app.hive_posts hp
JOIN hivemind_app.hive_accounts ha ON ha.id = hp.author_id
JOIN hivemind_app.hive_permlink_data hpd ON hpd.id = hp.permlink_id
WHERE ha.name = _author AND hpd.permlink = _permlink AND hp.counter_deleted = 0
), 0 );
IF _check AND __post_id = 0 THEN
SELECT INTO __post_id (
SELECT COUNT(hp.id)
FROM hive_posts hp
JOIN hive_accounts ha ON ha.id = hp.author_id
JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
FROM hivemind_app.hive_posts hp
JOIN hivemind_app.hive_accounts ha ON ha.id = hp.author_id
JOIN hivemind_app.hive_permlink_data hpd ON hpd.id = hp.permlink_id
WHERE ha.name = _author AND hpd.permlink = _permlink
);
IF __post_id = 0 THEN
@ -54,10 +54,10 @@ END
$function$
;
DROP FUNCTION IF EXISTS find_account_id(character varying, boolean)
DROP FUNCTION IF EXISTS hivemind_app.find_account_id(character varying, boolean)
;
CREATE OR REPLACE FUNCTION find_account_id(
in _account hive_accounts.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_account_id(
in _account hivemind_app.hive_accounts.name%TYPE,
in _check boolean)
RETURNS INT
LANGUAGE 'plpgsql'
@ -67,7 +67,7 @@ DECLARE
__account_id INT = 0;
BEGIN
IF (_account <> '') THEN
SELECT INTO __account_id COALESCE( ( SELECT id FROM hive_accounts WHERE name=_account ), 0 );
SELECT INTO __account_id COALESCE( ( SELECT id FROM hivemind_app.hive_accounts WHERE name=_account ), 0 );
IF _check AND __account_id = 0 THEN
RAISE EXCEPTION 'Account % does not exist', _account USING ERRCODE = 'CEHM4';
END IF;
@ -77,10 +77,10 @@ END
$function$
;
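A hedged call sketch ('gtg' is just an illustrative account name):

SELECT hivemind_app.find_account_id('gtg', True);  -- raises CEHM4 when the account is missing and _check is true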
DROP FUNCTION IF EXISTS public.find_tag_id CASCADE
DROP FUNCTION IF EXISTS hivemind_app.find_tag_id CASCADE
;
CREATE OR REPLACE FUNCTION public.find_tag_id(
in _tag_name hive_tag_data.tag%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_tag_id(
in _tag_name hivemind_app.hive_tag_data.tag%TYPE,
in _check BOOLEAN
)
RETURNS INTEGER
@ -91,7 +91,7 @@ DECLARE
__tag_id INT = 0;
BEGIN
IF (_tag_name <> '') THEN
SELECT INTO __tag_id COALESCE( ( SELECT id FROM hive_tag_data WHERE tag=_tag_name ), 0 );
SELECT INTO __tag_id COALESCE( ( SELECT id FROM hivemind_app.hive_tag_data WHERE tag=_tag_name ), 0 );
IF _check AND __tag_id = 0 THEN
RAISE EXCEPTION 'Tag % does not exist', _tag_name USING ERRCODE = 'CEHM5';
END IF;
@ -101,10 +101,10 @@ END
$function$
;
DROP FUNCTION IF EXISTS public.find_category_id CASCADE
DROP FUNCTION IF EXISTS hivemind_app.find_category_id CASCADE
;
CREATE OR REPLACE FUNCTION public.find_category_id(
in _category_name hive_category_data.category%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_category_id(
in _category_name hivemind_app.hive_category_data.category%TYPE,
in _check BOOLEAN
)
RETURNS INTEGER
@ -115,7 +115,7 @@ DECLARE
__category_id INT = 0;
BEGIN
IF (_category_name <> '') THEN
SELECT INTO __category_id COALESCE( ( SELECT id FROM hive_category_data WHERE category=_category_name ), 0 );
SELECT INTO __category_id COALESCE( ( SELECT id FROM hivemind_app.hive_category_data WHERE category=_category_name ), 0 );
IF _check AND __category_id = 0 THEN
RAISE EXCEPTION 'Category % does not exist', _category_name USING ERRCODE = 'CEHM6';
END IF;
@ -125,10 +125,10 @@ END
$function$
;
DROP FUNCTION IF EXISTS public.find_community_id CASCADE
DROP FUNCTION IF EXISTS hivemind_app.find_community_id CASCADE
;
CREATE OR REPLACE FUNCTION public.find_community_id(
in _community_name hive_communities.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_community_id(
in _community_name hivemind_app.hive_communities.name%TYPE,
in _check BOOLEAN
)
RETURNS INTEGER
@ -139,7 +139,7 @@ DECLARE
__community_id INT = 0;
BEGIN
IF (_community_name <> '') THEN
SELECT INTO __community_id COALESCE( ( SELECT id FROM hive_communities WHERE name=_community_name ), 0 );
SELECT INTO __community_id COALESCE( ( SELECT id FROM hivemind_app.hive_communities WHERE name=_community_name ), 0 );
IF _check AND __community_id = 0 THEN
RAISE EXCEPTION 'Community % does not exist', _community_name USING ERRCODE = 'CEHM7';
END IF;
@ -150,9 +150,9 @@ $function$
;
--Maybe better to convert roles to ENUM
DROP FUNCTION IF EXISTS get_role_name
DROP FUNCTION IF EXISTS hivemind_app.get_role_name
;
CREATE OR REPLACE FUNCTION get_role_name(in _role_id INT)
CREATE OR REPLACE FUNCTION hivemind_app.get_role_name(in _role_id INT)
RETURNS VARCHAR
LANGUAGE 'plpgsql'
AS
@ -171,25 +171,25 @@ END
$function$
;
DROP FUNCTION IF EXISTS is_pinned
DROP FUNCTION IF EXISTS hivemind_app.is_pinned
;
CREATE OR REPLACE FUNCTION is_pinned(in _post_id INT)
CREATE OR REPLACE FUNCTION hivemind_app.is_pinned(in _post_id INT)
RETURNS boolean
LANGUAGE 'plpgsql'
AS
$function$
BEGIN
RETURN is_pinned FROM hive_posts WHERE id = _post_id LIMIT 1
RETURN is_pinned FROM hivemind_app.hive_posts WHERE id = _post_id LIMIT 1
;
END
$function$
;
DROP FUNCTION IF EXISTS public.find_subscription_id CASCADE
DROP FUNCTION IF EXISTS hivemind_app.find_subscription_id CASCADE
;
CREATE OR REPLACE FUNCTION public.find_subscription_id(
in _account hive_accounts.name%TYPE,
in _community_name hive_communities.name%TYPE,
CREATE OR REPLACE FUNCTION hivemind_app.find_subscription_id(
in _account hivemind_app.hive_accounts.name%TYPE,
in _community_name hivemind_app.hive_communities.name%TYPE,
in _check BOOLEAN
)
RETURNS INTEGER
@ -201,9 +201,9 @@ DECLARE
BEGIN
IF (_account <> '') THEN
SELECT INTO __subscription_id COALESCE( (
SELECT hs.id FROM hive_subscriptions hs
JOIN hive_accounts ha ON ha.id = hs.account_id
JOIN hive_communities hc ON hc.id = hs.community_id
SELECT hs.id FROM hivemind_app.hive_subscriptions hs
JOIN hivemind_app.hive_accounts ha ON ha.id = hs.account_id
JOIN hivemind_app.hive_communities hc ON hc.id = hs.community_id
WHERE ha.name = _account AND hc.name = _community_name
), 0 );
IF _check AND __subscription_id = 0 THEN
@ -214,3 +214,39 @@ BEGIN
END
$function$
;
DROP TYPE IF EXISTS hivemind_app.head_state CASCADE;
CREATE TYPE hivemind_app.head_state AS
(
num int,
created_at timestamp,
age int
);
DROP FUNCTION IF EXISTS hivemind_app.get_head_state();
CREATE OR REPLACE FUNCTION hivemind_app.get_head_state()
RETURNS SETOF hivemind_app.head_state
LANGUAGE 'plpgsql'
AS
$$
DECLARE
__num int;
__created_at timestamp := '1970-01-01 00:00:00'::timestamp;
__record hivemind_app.head_state;
BEGIN
SELECT current_block_num INTO __num FROM hive.hivemind_app_context_data_view;
IF __num > 0 THEN
SELECT created_at INTO __created_at FROM hive.hivemind_app_blocks_view WHERE num = __num;
ELSE
-- MIGHT BE NULL
__num = 0;
END IF;
__record.num = __num;
__record.created_at = __created_at;
__record.age = extract(epoch from __created_at);
RETURN NEXT __record;
END
$$
;
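A call sketch; per the type definition above, the function returns one row with the head block number, its timestamp, and (despite the column name) the epoch value of that timestamp in age:

SELECT num, created_at, age FROM hivemind_app.get_head_state();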

View File

@ -2,6 +2,7 @@
import logging
from hive.conf import SCHEMA_NAME
from hive.db.adapter import Db
from hive.indexer.db_adapter_holder import DbAdapterHolder
from hive.utils.account import get_profile_str
@ -56,7 +57,7 @@ class Accounts(DbAdapterHolder):
def load_ids(cls):
"""Load a full (name: id) dict into memory."""
assert not cls._ids, "id map already loaded"
cls._ids = dict(DB.query_all("SELECT name, id FROM hive_accounts"))
cls._ids = dict(DB.query_all(f"SELECT name, id FROM {SCHEMA_NAME}.hive_accounts"))
@classmethod
def clear_ids(cls):
@ -131,7 +132,7 @@ class Accounts(DbAdapterHolder):
(_posting_json_metadata, _json_metadata) = get_profile_str(op_details)
sql = f"""
INSERT INTO hive_accounts (name, created_at, posting_json_metadata, json_metadata )
INSERT INTO {SCHEMA_NAME}.hive_accounts (name, created_at, posting_json_metadata, json_metadata )
VALUES ( '{name}', '{block_date}', {cls.get_json_data(_posting_json_metadata)}, {cls.get_json_data(_json_metadata)} )
RETURNING id
"""
@ -159,8 +160,8 @@ class Accounts(DbAdapterHolder):
if cls._updates_data:
cls.beginTx()
sql = """
UPDATE hive_accounts ha
sql = f"""
UPDATE {SCHEMA_NAME}.hive_accounts ha
SET
posting_json_metadata =
(
@ -181,7 +182,7 @@ class Accounts(DbAdapterHolder):
(
VALUES
-- allow_change_posting, posting_json_metadata, json_metadata, name
{}
{{}}
)T( allow_change_posting, posting_json_metadata, json_metadata, name )
)T2
WHERE ha.name = T2.name

View File

@ -1,20 +1,23 @@
from abc import ABC, abstractmethod
from enum import Enum
import logging
import queue
from hive.signals import can_continue_thread
log = logging.getLogger(__name__)
class VirtualOperationType(Enum):
AUTHOR_REWARD = 1
COMMENT_REWARD = 2
EFFECTIVE_COMMENT_VOTE = 3
COMMENT_PAYOUT_UPDATE = 4
INEFFECTIVE_DELETE_COMMENT = 5
# same ids as in hive.operation_types table
AUTHOR_REWARD = 51
COMMENT_REWARD = 53
COMMENT_PAYOUT_UPDATE = 61
EFFECTIVE_COMMENT_VOTE = 72
INEFFECTIVE_DELETE_COMMENT = 73
def from_name(operation_name):
@classmethod
def from_name(cls, operation_name: str):
if operation_name == 'author_reward_operation':
return VirtualOperationType.AUTHOR_REWARD
if operation_name == 'comment_reward_operation':
@ -30,21 +33,23 @@ class VirtualOperationType(Enum):
class OperationType(Enum):
POW = 1
POW_2 = 2
ACCOUNT_CREATE = 3
ACCOUNT_CREATE_WITH_DELEGATION = 4
CREATE_CLAIMED_ACCOUNT = 5
ACCOUNT_UPDATE = 6
ACCOUNT_UPDATE_2 = 7
COMMENT = 8
DELETE_COMMENT = 9
COMMENT_OPTION = 10
VOTE = 11
TRANSFER = 12
CUSTOM_JSON = 13
# same ids as in hive.operation_types table
VOTE = 0
COMMENT = 1
TRANSFER = 2
ACCOUNT_CREATE = 9
ACCOUNT_UPDATE = 10
POW = 14
DELETE_COMMENT = 17
CUSTOM_JSON = 18
COMMENT_OPTION = 19
CREATE_CLAIMED_ACCOUNT = 23
POW_2 = 30
ACCOUNT_CREATE_WITH_DELEGATION = 41
ACCOUNT_UPDATE_2 = 43
def from_name(operation_name):
@classmethod
def from_name(cls, operation_name: str):
if operation_name == 'pow_operation':
return OperationType.POW
if operation_name == 'pow2_operation':
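The enum ids above are stated to mirror HAF's hive.operation_types table; a hedged way to cross-check them from psql (column names are assumed from HAF's schema):

SELECT id, name FROM hive.operation_types ORDER BY id;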
@ -98,14 +103,6 @@ class Block(ABC):
def get_previous_block_hash(self):
pass
@abstractmethod
def get_number_of_transactions(self):
pass
@abstractmethod
def get_number_of_operations(self):
pass
@abstractmethod
def get_next_transaction(self):
pass
@ -154,36 +151,16 @@ class BlockWrapper(Block):
def get_previous_block_hash(self):
return self.wrapped_block.get_previous_block_hash()
def get_number_of_transactions(self):
return self.wrapped_block.get_number_of_transactions()
def get_number_of_operations(self):
return self.wrapped_block.get_number_of_operations()
def get_next_transaction(self):
return self.wrapped_block.get_next_transaction()
class BlocksProviderBase(ABC):
def __init__(self, breaker, exception_reporter):
"""
breaker - callable, returns True when sync can continue, False when a break was requested
exception_reporter - callable, used to report an undesired exception in a synchronization thread
"""
assert breaker
assert exception_reporter
self._breaker = breaker
self._exception_reporter = exception_reporter
def __init__(self):
self._blocks_queue_size = 1500
self._blocks_data_queue_size = 1500
self._operations_queue_size = 1500
def report_exception():
self._exception_reporter()
@abstractmethod
def start(self):
"""Shall start threads and returns lists of futures"""
@ -194,13 +171,12 @@ class BlocksProviderBase(ABC):
"""Returns lists of blocks"""
pass
def _get_from_queue(self, data_queue, number_of_elements):
@staticmethod
def _get_from_queue(data_queue, number_of_elements):
"""Tool function to get elements from queue"""
ret = []
for element in range(number_of_elements):
if not self._breaker():
break
while self._breaker():
while can_continue_thread():
try:
ret.append(data_queue.get(True, 1))
data_queue.task_done()

View File

@ -3,13 +3,17 @@
import concurrent
from concurrent.futures import ThreadPoolExecutor
import logging
from pathlib import Path
from time import perf_counter
from typing import Tuple
from hive.conf import Conf, SCHEMA_NAME
from hive.db.adapter import Db
from hive.indexer.accounts import Accounts
from hive.indexer.block import Block, Operation, OperationType, Transaction, VirtualOperationType
from hive.indexer.custom_op import CustomOp
from hive.indexer.follow import Follow
from hive.indexer.hive_db.block import BlockHiveDb
from hive.indexer.notify import Notify
from hive.indexer.payments import Payments
from hive.indexer.post_data_cache import PostDataCache
@ -19,6 +23,7 @@ from hive.indexer.reputations import Reputations
from hive.indexer.votes import Votes
from hive.server.common.mentions import Mentions
from hive.server.common.payout_stats import PayoutStats
from hive.utils.communities_rank import update_communities_posts_and_rank
from hive.utils.stats import FlushStatusManager as FSM
from hive.utils.stats import OPStatusManager as OPSM
from hive.utils.timer import time_it
@ -29,17 +34,16 @@ DB = Db.instance()
def time_collector(f):
startTime = FSM.start()
start_time = FSM.start()
result = f()
elapsedTime = FSM.stop(startTime)
return (result, elapsedTime)
elapsed_time = FSM.stop(start_time)
return result, elapsed_time
class Blocks:
"""Processes blocks, dispatches work, manages `hive_blocks` table."""
"""Processes blocks, dispatches work, manages the state of the database (blocks consistency, and numbers)."""
blocks_to_flush = []
_conf = None
_head_block_date = None
_current_block_date = None
_last_safe_cashout_block = 0
@ -56,30 +60,34 @@ class Blocks:
('Accounts', Accounts.flush, Accounts),
]
def __init__(cls):
head_date = cls.head_date()
def __init__(self):
head_date = self.head_date()
if head_date == '':
cls._head_block_date = None
cls._current_block_date = None
self.__class__._head_block_date = None
self.__class__._current_block_date = None
else:
cls._head_block_date = head_date
cls._current_block_date = head_date
self.__class__._head_block_date = head_date
self.__class__._current_block_date = head_date
@classmethod
def setup_own_db_access(cls, sharedDbAdapter):
PostDataCache.setup_own_db_access(sharedDbAdapter, "PostDataCache")
Reputations.setup_own_db_access(sharedDbAdapter, "Reputations")
Votes.setup_own_db_access(sharedDbAdapter, "Votes")
Follow.setup_own_db_access(sharedDbAdapter, "Follow")
Posts.setup_own_db_access(sharedDbAdapter, "Posts")
Reblog.setup_own_db_access(sharedDbAdapter, "Reblog")
Notify.setup_own_db_access(sharedDbAdapter, "Notify")
Accounts.setup_own_db_access(sharedDbAdapter, "Accounts")
PayoutStats.setup_own_db_access(sharedDbAdapter, "PayoutStats")
Mentions.setup_own_db_access(sharedDbAdapter, "Mentions")
def setup(cls, conf: Conf):
cls._conf = conf
@classmethod
def close_own_db_access(cls):
@staticmethod
def setup_own_db_access(shared_db_adapter: Db) -> None:
PostDataCache.setup_own_db_access(shared_db_adapter, "PostDataCache")
Reputations.setup_own_db_access(shared_db_adapter, "Reputations")
Votes.setup_own_db_access(shared_db_adapter, "Votes")
Follow.setup_own_db_access(shared_db_adapter, "Follow")
Posts.setup_own_db_access(shared_db_adapter, "Posts")
Reblog.setup_own_db_access(shared_db_adapter, "Reblog")
Notify.setup_own_db_access(shared_db_adapter, "Notify")
Accounts.setup_own_db_access(shared_db_adapter, "Accounts")
PayoutStats.setup_own_db_access(shared_db_adapter, "PayoutStats")
Mentions.setup_own_db_access(shared_db_adapter, "Mentions")
@staticmethod
def close_own_db_access() -> None:
PostDataCache.close_own_db_access()
Reputations.close_own_db_access()
Votes.close_own_db_access()
@ -91,20 +99,38 @@ class Blocks:
PayoutStats.close_own_db_access()
Mentions.close_own_db_access()
@classmethod
def head_num(cls):
"""Get hive's head block number."""
sql = "SELECT num FROM hive_blocks ORDER BY num DESC LIMIT 1"
@staticmethod
def head_num() -> int:
"""Get head block number from the application view (hive.hivemind_app_blocks_view)."""
sql = f"SELECT num FROM {SCHEMA_NAME}.get_head_state();"
return DB.query_one(sql) or 0
@classmethod
def head_date(cls):
@staticmethod
def last_imported() -> int:
"""
Get the last block imported by hivemind_app.
(it may not be completed yet, i.e. no update queries have been run with this block number)
"""
sql = f"SELECT last_imported_block_num FROM {SCHEMA_NAME}.hive_state;"
return DB.query_one(sql) or 0
@staticmethod
def last_completed() -> int:
"""
Get the last block completed by hivemind_app.
(a block is considered completed once all update queries have been run with this block number)
"""
sql = f"SELECT last_completed_block_num FROM {SCHEMA_NAME}.hive_state;"
return DB.query_one(sql) or 0
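Together, the two counters above make the sync lag directly observable; a minimal sketch built only from the methods defined here:

def blocks_awaiting_updates() -> int:
    # blocks whose operations are stored but whose update queries
    # (see on_live_blocks_processed below) have not yet been run
    return Blocks.last_imported() - Blocks.last_completed()

This is the same difference that is_consistency() checks at startup further down.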
@staticmethod
def head_date() -> str:
"""Get hive's head block date."""
sql = "SELECT head_block_time()"
return str(DB.query_one(sql) or '')
@classmethod
def set_end_of_sync_lib(cls, lib):
def set_end_of_sync_lib(cls, lib: int) -> None:
"""Set last block that guarantees cashout before end of sync based on LIB"""
if lib < 10629455:
# posts created before HF17 could stay unpaid forever
@ -120,8 +146,8 @@ class Blocks:
)
@classmethod
def flush_data_in_n_threads(cls):
completedThreads = 0
def flush_data_in_n_threads(cls) -> None:
completed_threads = 0
pool = ThreadPoolExecutor(max_workers=len(cls._concurrent_flush))
flush_futures = {
@ -129,7 +155,7 @@ class Blocks:
}
for future in concurrent.futures.as_completed(flush_futures):
(description, c) = flush_futures[future]
completedThreads = completedThreads + 1
completed_threads = completed_threads + 1
try:
(n, elapsedTime) = future.result()
assert n is not None
@ -144,10 +170,10 @@ class Blocks:
raise exc
pool.shutdown()
assert completedThreads == len(cls._concurrent_flush)
assert completed_threads == len(cls._concurrent_flush)
@classmethod
def flush_data_in_1_thread(cls):
def flush_data_in_1_thread(cls) -> None:
for description, f, c in cls._concurrent_flush:
try:
f()
@ -156,14 +182,16 @@ class Blocks:
raise exc
@classmethod
def process_blocks(cls, blocks):
def process_blocks(cls, blocks) -> Tuple[int, int]:
last_num = 0
last_date = None
first_block = -1
try:
for block in blocks:
if first_block == -1:
first_block = block.get_num()
last_num = cls._process(block)
last_date = block.get_date()
except Exception as e:
log.error("exception encountered block %d", last_num + 1)
raise e
@ -172,44 +200,56 @@ class Blocks:
# expensive. So is tracking follows at all; hence we track
# deltas in memory and update follow/er counts in bulk.
flush_time = FSM.start()
def register_time(f_time, name, pushed):
assert pushed is not None
FSM.flush_stat(name, FSM.stop(f_time), pushed)
return FSM.start()
log.info("#############################################################################")
flush_time = register_time(flush_time, "Blocks", cls._flush_blocks())
sql = f'SELECT {SCHEMA_NAME}.update_last_imported_block(:last_num, :last_date);'
DB.query_no_return(sql, last_num=last_num, last_date=last_date)
return first_block, last_num
@classmethod
def process_multi(cls, blocks, is_initial_sync):
def process_multi(cls, blocks, is_massive_sync: bool) -> None:
"""Batch-process blocks; wrapped in a transaction."""
time_start = OPSM.start()
DB.query("START TRANSACTION")
if is_massive_sync:
DB.query("START TRANSACTION")
first_block, last_num = cls.process_blocks(blocks)
if not is_initial_sync:
if not is_massive_sync:
log.info("[PROCESS MULTI] Flushing data in 1 thread")
cls.flush_data_in_1_thread()
if first_block > -1:
log.info("[PROCESS MULTI] Tables updating in live synchronization")
cls.on_live_blocks_processed(first_block, last_num)
cls.on_live_blocks_processed(first_block)
cls._periodic_actions(blocks[0])
DB.query("COMMIT")
if is_initial_sync:
if is_massive_sync:
log.info("[PROCESS MULTI] Flushing data in N threads")
cls.flush_data_in_n_threads()
log.info(f"[PROCESS MULTI] {len(blocks)} blocks in {OPSM.stop(time_start) :.4f}s")
@staticmethod
def prepare_vops(comment_payout_ops, block, date, block_num, is_safe_cashout):
@classmethod
def _periodic_actions(cls, block: BlockHiveDb) -> None:
"""Actions performed at a given time, calculated on the basis of the current block number"""
if (block_num := block.get_num()) % 1200 == 0: # 1hour
log.info(f"head block {block_num} @ {block.get_date()}")
log.info("[SINGLE] hourly stats")
log.info("[SINGLE] filling payout_stats_view executed")
with ThreadPoolExecutor(max_workers=2) as executor:
executor.submit(PayoutStats.generate)
executor.submit(Mentions.refresh)
elif block_num % 200 == 0: # 10min
log.info("[SINGLE] 10min")
log.info("[SINGLE] updating communities posts and rank")
update_communities_posts_and_rank(db=DB)
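The cadences above follow from Hive's 3-second block interval; a small sketch making the arithmetic explicit (constant names are illustrative, not from the codebase):

BLOCK_INTERVAL_SECONDS = 3
HOURLY_BLOCKS = 3600 // BLOCK_INTERVAL_SECONDS      # 1200 blocks == 1 hour
TEN_MINUTE_BLOCKS = 600 // BLOCK_INTERVAL_SECONDS   # 200 blocks == 10 minutes
assert HOURLY_BLOCKS == 1200 and TEN_MINUTE_BLOCKS == 200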
@classmethod
def prepare_vops(cls, comment_payout_ops: dict, block: Block, date, block_num: int, is_safe_cashout: bool) -> dict:
def get_empty_ops():
return {
VirtualOperationType.AUTHOR_REWARD: None,
@ -221,8 +261,12 @@ class Blocks:
ineffective_deleted_ops = {}
for vop in block.get_next_vop():
if cls._conf.get('log_virtual_op_calls'):
with open(Path(__file__).parent.parent / 'virtual_operations.log', 'a', encoding='utf-8') as file:
file.write(f'{block.get_num()}: {vop.get_type()}')
file.write(str(vop.get_body()))
start = OPSM.start()
key = None
op_type = vop.get_type()
assert op_type
@ -279,11 +323,11 @@ class Blocks:
return ineffective_deleted_ops
@classmethod
def _process(cls, block):
def _process(cls, block: Block) -> int:
"""Process a single block. Assumes a trx is open."""
# pylint: disable=too-many-branches
assert issubclass(type(block), Block)
num = cls._push(block)
num = block.get_num()
cls._current_block_date = block.get_date()
# head block date shall point to the last imported block (not yet the current one) to conform to hived behavior.
@ -304,6 +348,11 @@ class Blocks:
for operation in transaction.get_next_operation():
assert issubclass(type(operation), Operation)
if cls._conf.get('log_op_calls'):
with open(Path(__file__).parent.parent / 'operations.log', 'a', encoding='utf-8') as file:
file.write(f'{block.get_num()}: {operation.get_type()}')
file.write(str(operation.get_body()))
start = OPSM.start()
op_type = operation.get_type()
assert op_type, "Only supported types are expected"
@ -370,161 +419,25 @@ class Blocks:
return num
@classmethod
def verify_head(cls, steem):
"""Perform a fork recovery check on startup."""
hive_head = cls.head_num()
if not hive_head:
return
# move backwards from head until hive/steem agree
to_pop = []
cursor = hive_head
while True:
assert hive_head - cursor < 25, "fork too deep"
hive_block = cls._get(cursor)
steem_hash = steem.get_block(cursor)['block_id']
match = hive_block['hash'] == steem_hash
log.info(
"[INIT] fork check. block %d: %s vs %s --- %s",
hive_block['num'],
hive_block['hash'],
steem_hash,
'ok' if match else 'invalid',
)
if match:
break
to_pop.append(hive_block)
cursor -= 1
if hive_head == cursor:
return # no fork!
log.error("[FORK] depth is %d; popping blocks %d - %d", hive_head - cursor, cursor + 1, hive_head)
# we should not attempt to recover from fork until it's safe
fork_limit = steem.last_irreversible()
assert cursor < fork_limit, "not proceeding until head is irreversible"
cls._pop(to_pop)
@classmethod
def _get(cls, num):
"""Fetch a specific block."""
sql = """SELECT num, created_at date, hash
FROM hive_blocks WHERE num = :num LIMIT 1"""
return dict(DB.query_row(sql, num=num))
@classmethod
def _push(cls, block):
"""Insert a row in `hive_blocks`."""
cls.blocks_to_flush.append(
{
'num': block.get_num(),
'hash': block.get_hash(),
'prev': block.get_previous_block_hash(),
'txs': block.get_number_of_transactions(),
'ops': block.get_number_of_operations(),
'date': block.get_date(),
}
)
return block.get_num()
@classmethod
def _flush_blocks(cls):
query = """
INSERT INTO
hive_blocks (num, hash, prev, txs, ops, created_at, completed)
VALUES
"""
values = []
for block in cls.blocks_to_flush:
values.append(
f"({block['num']}, '{block['hash']}', '{block['prev']}', {block['txs']}, {block['ops']}, '{block['date']}', {False})"
)
query = query + ",".join(values)
DB.query_prepared(query)
values.clear()
n = len(cls.blocks_to_flush)
cls.blocks_to_flush.clear()
return n
@classmethod
def _pop(cls, blocks):
"""Pop head blocks to navigate head to a point prior to fork.
Without an undo database, there is a limit to how fully we can recover.
If consistency is critical, run hive with TRAIL_BLOCKS=-1 to only index
up to last irreversible. Otherwise use TRAIL_BLOCKS=2 to stay closer
while avoiding the vast majority of microforks.
As-is, there are a few caveats with the following strategy:
- follow counts can get out of sync (hive needs to force-recount)
- follow state could get out of sync (user-recoverable)
For 1.5, also need to handle:
- hive_communities
- hive_members
- hive_flags
- hive_modlog
"""
DB.query("START TRANSACTION")
for block in blocks:
num = block['num']
date = block['date']
log.warning("[FORK] popping block %d @ %s", num, date)
assert num == cls.head_num(), "can only pop head block"
# get all affected post_ids in this block
sql = "SELECT id FROM hive_posts WHERE created_at >= :date"
post_ids = tuple(DB.query_col(sql, date=date))
# remove all recent records -- communities
DB.query("DELETE FROM hive_notifs WHERE created_at >= :date", date=date)
DB.query("DELETE FROM hive_subscriptions WHERE created_at >= :date", date=date)
DB.query("DELETE FROM hive_roles WHERE created_at >= :date", date=date)
DB.query("DELETE FROM hive_communities WHERE created_at >= :date", date=date)
# remove all recent records -- core
DB.query("DELETE FROM hive_feed_cache WHERE created_at >= :date", date=date)
DB.query("DELETE FROM hive_reblogs WHERE created_at >= :date", date=date)
DB.query("DELETE FROM hive_follows WHERE created_at >= :date", date=date)
# remove posts: core, tags, cache entries
if post_ids:
DB.query("DELETE FROM hive_posts WHERE id IN :ids", ids=post_ids)
DB.query("DELETE FROM hive_post_data WHERE id IN :ids", ids=post_ids)
DB.query("DELETE FROM hive_payments WHERE block_num = :num", num=num)
DB.query("DELETE FROM hive_blocks WHERE num = :num", num=num)
DB.query("COMMIT")
log.warning("[FORK] recovery complete")
# TODO: manually re-process here the blocks which were just popped.
@classmethod
@staticmethod
@time_it
def on_live_blocks_processed(cls, first_block, last_block):
def on_live_blocks_processed(block_number: int) -> None:
"""Is invoked when processing of block range is done and received
informations from hived are already stored in db
"""
is_hour_action = last_block % 1200 == 0
is_hour_action = block_number % 1200 == 0
queries = [
f"SELECT update_posts_rshares({first_block}, {last_block})",
f"SELECT update_hive_posts_children_count({first_block}, {last_block})",
f"SELECT update_hive_posts_root_id({first_block},{last_block})",
f"SELECT update_hive_posts_api_helper({first_block},{last_block})",
f"SELECT update_feed_cache({first_block}, {last_block})",
f"SELECT update_hive_posts_mentions({first_block}, {last_block})",
f"SELECT update_notification_cache({first_block}, {last_block}, {is_hour_action})",
f"SELECT update_follow_count({first_block}, {last_block})",
f"SELECT update_account_reputations({first_block}, {last_block}, False)",
f"SELECT update_hive_blocks_consistency_flag({first_block}, {last_block})",
f"SELECT {SCHEMA_NAME}.update_posts_rshares({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_children_count({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_root_id({block_number},{block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_api_helper({block_number},{block_number})",
f"SELECT {SCHEMA_NAME}.update_feed_cache({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_mentions({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_notification_cache({block_number}, {block_number}, {is_hour_action})",
f"SELECT {SCHEMA_NAME}.update_follow_count({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_account_reputations({block_number}, {block_number}, False)",
f"SELECT {SCHEMA_NAME}.update_last_completed_block({block_number})",
]
for query in queries:
@ -532,11 +445,15 @@ class Blocks:
DB.query_no_return(query)
log.info("%s executed in: %.4f s", query, perf_counter() - time_start)
@classmethod
def is_consistency(cls):
"""Check if all tuples in `hive_blocks` are written correctly.
If any record has `completed` == false, it indicates that the database was closed incorrectly or a rollback failed.
@staticmethod
def is_consistency() -> bool:
"""
not_completed_blocks = DB.query_one("SELECT count(*) FROM hive_blocks WHERE completed = false LIMIT 1")
log.info("[INIT] Number of not completed blocks: %s.", not_completed_blocks)
return not_completed_blocks == 0
Check if all tuples are written correctly.
If there are any not_completed_blocks, it means no update queries have been run for those blocks.
"""
not_completed_blocks = Blocks.last_imported() - Blocks.last_completed()
if not_completed_blocks:
log.warning(f"Number of not completed blocks: {not_completed_blocks}")
return False
return True

View File

@ -8,6 +8,7 @@ import re
import ujson as json
from hive.conf import SCHEMA_NAME
from hive.db.adapter import Db
from hive.indexer.accounts import Accounts
from hive.indexer.notify import Notify
@ -129,12 +130,12 @@ class Community:
_id = Accounts.get_id(name)
# insert community
sql = """INSERT INTO hive_communities (id, name, type_id, created_at, block_num)
sql = f"""INSERT INTO {SCHEMA_NAME}.hive_communities (id, name, type_id, created_at, block_num)
VALUES (:id, :name, :type_id, :date, :block_num)"""
DB.query(sql, id=_id, name=name, type_id=type_id, date=block_date, block_num=block_num)
# insert owner
sql = """INSERT INTO hive_roles (community_id, account_id, role_id, created_at)
sql = f"""INSERT INTO {SCHEMA_NAME}.hive_roles (community_id, account_id, role_id, created_at)
VALUES (:community_id, :account_id, :role_id, :date)"""
DB.query(sql, community_id=_id, account_id=_id, role_id=Role.owner.value, date=block_date)
@ -161,7 +162,7 @@ class Community:
assert name, 'name is empty'
if name in cls._ids:
return cls._ids[name]
sql = "SELECT id FROM hive_communities WHERE name = :name"
sql = f"SELECT id FROM {SCHEMA_NAME}.hive_communities WHERE name = :name"
cid = DB.query_one(sql, name=name)
if cid:
cls._ids[name] = cid
@ -172,7 +173,7 @@ class Community:
def _get_name(cls, cid):
if cid in cls._names:
return cls._names[cid]
sql = "SELECT name FROM hive_communities WHERE id = :id"
sql = f"SELECT name FROM {SCHEMA_NAME}.hive_communities WHERE id = :id"
name = DB.query_one(sql, id=cid)
if cid:
cls._ids[name] = cid
@ -183,8 +184,8 @@ class Community:
def get_all_muted(cls, community_id):
"""Return a list of all muted accounts."""
return DB.query_col(
"""SELECT name FROM hive_accounts
WHERE id IN (SELECT account_id FROM hive_roles
f"""SELECT name FROM {SCHEMA_NAME}.hive_accounts
WHERE id IN (SELECT account_id FROM {SCHEMA_NAME}.hive_roles
WHERE community_id = :community_id
AND role_id < 0)""",
community_id=community_id,
@ -196,7 +197,7 @@ class Community:
return (
DB.query_one(
"""SELECT role_id FROM hive_roles
f"""SELECT role_id FROM {SCHEMA_NAME}.hive_roles
WHERE community_id = :community_id
AND account_id = :account_id
LIMIT 1""",
@ -337,31 +338,33 @@ class CommunityOp:
# Community-level commands
if action == 'updateProps':
bind = ', '.join([k + " = :" + k for k in list(self.props.keys())])
DB.query(f"UPDATE hive_communities SET {bind} WHERE id = :id", id=self.community_id, **self.props)
DB.query(
f"UPDATE {SCHEMA_NAME}.hive_communities SET {bind} WHERE id = :id", id=self.community_id, **self.props
)
self._notify('set_props', payload=json.dumps(read_key_dict(self.op, 'props')))
elif action == 'subscribe':
DB.query(
"""INSERT INTO hive_subscriptions
f"""INSERT INTO {SCHEMA_NAME}.hive_subscriptions
(account_id, community_id, created_at, block_num)
VALUES (:actor_id, :community_id, :date, :block_num)""",
**params,
)
DB.query(
"""UPDATE hive_communities
f"""UPDATE {SCHEMA_NAME}.hive_communities
SET subscribers = subscribers + 1
WHERE id = :community_id""",
**params,
)
elif action == 'unsubscribe':
DB.query(
"""DELETE FROM hive_subscriptions
f"""DELETE FROM {SCHEMA_NAME}.hive_subscriptions
WHERE account_id = :actor_id
AND community_id = :community_id""",
**params,
)
DB.query(
"""UPDATE hive_communities
f"""UPDATE {SCHEMA_NAME}.hive_communities
SET subscribers = subscribers - 1
WHERE id = :community_id""",
**params,
@ -370,7 +373,7 @@ class CommunityOp:
# Account-level actions
elif action == 'setRole':
DB.query(
"""INSERT INTO hive_roles
f"""INSERT INTO {SCHEMA_NAME}.hive_roles
(account_id, community_id, role_id, created_at)
VALUES (:account_id, :community_id, :role_id, :date)
ON CONFLICT (account_id, community_id)
@ -380,7 +383,7 @@ class CommunityOp:
self._notify('set_role', payload=Role(self.role_id).name)
elif action == 'setUserTitle':
DB.query(
"""INSERT INTO hive_roles
f"""INSERT INTO {SCHEMA_NAME}.hive_roles
(account_id, community_id, title, created_at)
VALUES (:account_id, :community_id, :title, :date)
ON CONFLICT (account_id, community_id)
@ -392,7 +395,7 @@ class CommunityOp:
# Post-level actions
elif action == 'mutePost':
DB.query(
"""UPDATE hive_posts SET is_muted = '1'
f"""UPDATE {SCHEMA_NAME}.hive_posts SET is_muted = '1'
WHERE id = :post_id""",
**params,
)
@ -400,7 +403,7 @@ class CommunityOp:
elif action == 'unmutePost':
DB.query(
"""UPDATE hive_posts SET is_muted = '0'
f"""UPDATE {SCHEMA_NAME}.hive_posts SET is_muted = '0'
WHERE id = :post_id""",
**params,
)
@ -408,14 +411,14 @@ class CommunityOp:
elif action == 'pinPost':
DB.query(
"""UPDATE hive_posts SET is_pinned = '1'
f"""UPDATE {SCHEMA_NAME}.hive_posts SET is_pinned = '1'
WHERE id = :post_id""",
**params,
)
self._notify('pin_post', payload=self.notes)
elif action == 'unpinPost':
DB.query(
"""UPDATE hive_posts SET is_pinned = '0'
f"""UPDATE {SCHEMA_NAME}.hive_posts SET is_pinned = '0'
WHERE id = :post_id""",
**params,
)
@ -493,10 +496,10 @@ class CommunityOp:
_permlink = read_key_str(self.op, 'permlink', 256)
assert _permlink, 'must name a permlink'
sql = """
sql = f"""
SELECT hp.id, community_id
FROM live_posts_comments_view hp
JOIN hive_permlink_data hpd ON hp.permlink_id=hpd.id
FROM {SCHEMA_NAME}.live_posts_comments_view hp
JOIN {SCHEMA_NAME}.hive_permlink_data hpd ON hp.permlink_id=hpd.id
WHERE author_id=:_author AND hpd.permlink=:_permlink
"""
result = DB.query_row(sql, _author=self.account_id, _permlink=_permlink)
@ -601,32 +604,32 @@ class CommunityOp:
def _subscribed(self, account_id):
"""Check an account's subscription status."""
sql = """SELECT 1 FROM hive_subscriptions
sql = f"""SELECT 1 FROM {SCHEMA_NAME}.hive_subscriptions
WHERE community_id = :community_id
AND account_id = :account_id"""
return bool(DB.query_one(sql, community_id=self.community_id, account_id=account_id))
def _muted(self):
"""Check post's muted status."""
sql = "SELECT is_muted FROM hive_posts WHERE id = :id"
sql = f"SELECT is_muted FROM {SCHEMA_NAME}.hive_posts WHERE id = :id"
return bool(DB.query_one(sql, id=self.post_id))
def _parent_muted(self):
"""Check parent post's muted status."""
parent_id = "SELECT parent_id FROM hive_posts WHERE id = :id"
sql = f"SELECT is_muted FROM hive_posts WHERE id = ({parent_id})"
parent_id = f"SELECT parent_id FROM {SCHEMA_NAME}.hive_posts WHERE id = :id"
sql = f"SELECT is_muted FROM {SCHEMA_NAME}.hive_posts WHERE id = ({parent_id})"
return bool(DB.query_one(sql, id=self.post_id))
def _pinned(self):
"""Check post's pinned status."""
sql = "SELECT is_pinned FROM hive_posts WHERE id = :id"
sql = f"SELECT is_pinned FROM {SCHEMA_NAME}.hive_posts WHERE id = :id"
return bool(DB.query_one(sql, id=self.post_id))
def _flagged(self):
"""Check user's flag status."""
from hive.indexer.notify import NotifyType
sql = """SELECT 1 FROM hive_notifs
sql = f"""SELECT 1 FROM {SCHEMA_NAME}.hive_notifs
WHERE community_id = :community_id
AND post_id = :post_id
AND type_id = :type_id

View File

@ -5,6 +5,7 @@ import logging
from funcy.seqs import first
from hive.conf import SCHEMA_NAME
from hive.indexer.accounts import Accounts
from hive.indexer.db_adapter_holder import DbAdapterHolder
from hive.utils.normalize import escape_characters
@ -289,15 +290,14 @@ class Follow(DbAdapterHolder):
if cls.follow_items_to_flush or cls.list_resets_to_flush:
cls.beginTx()
sql = "SELECT {}({}::VARCHAR, {}::INT)"
for reset_list in cls.list_resets_to_flush:
query = sql.format(reset_list['reset_call'], reset_list['follower'], reset_list['block_num'])
cls.db.query_no_return(query)
sql = f"SELECT {SCHEMA_NAME}.{reset_list['reset_call']}({reset_list['follower']}::VARCHAR, {reset_list['block_num']}::INT)"
cls.db.query_no_return(sql)
cls.list_resets_to_flush.clear()
sql = """
INSERT INTO hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists, follow_muted, block_num)
sql = f"""
INSERT INTO {SCHEMA_NAME}.hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists, follow_muted, block_num)
SELECT
ds.follower_id,
ds.following_id,
@ -322,12 +322,12 @@ class Follow(DbAdapterHolder):
FROM
(
VALUES
{}
{{}}
) as T (id, follower, following, created_at, state, blacklisted, follow_blacklists, follow_muted, block_num)
INNER JOIN hive_accounts ha_flr ON ha_flr.name = T.follower
INNER JOIN hive_accounts ha_flg ON ha_flg.name = T.following
INNER JOIN {SCHEMA_NAME}.hive_accounts ha_flr ON ha_flr.name = T.follower
INNER JOIN {SCHEMA_NAME}.hive_accounts ha_flg ON ha_flg.name = T.following
) AS ds(id, follower_id, following_id, created_at, state, blacklisted, follow_blacklists, follow_muted, block_num)
LEFT JOIN hive_follows hfs ON hfs.follower = ds.follower_id AND hfs.following = ds.following_id
LEFT JOIN {SCHEMA_NAME}.hive_follows hfs ON hfs.follower = ds.follower_id AND hfs.following = ds.following_id
ORDER BY ds.id ASC
ON CONFLICT ON CONSTRAINT hive_follows_ux1 DO UPDATE
SET

View File

@ -68,8 +68,6 @@ class BlockHiveDb(Block):
date,
hash,
previous_block_hash,
number_of_transactions,
number_of_operations,
operations,
first_operation_idx,
opertion_id_to_enum,
@ -77,10 +75,8 @@ class BlockHiveDb(Block):
self._num = num
self._date = date
self._hash = hash.tobytes().decode()
self._prev_hash = previous_block_hash.tobytes().decode()
self._number_of_transactions = number_of_transactions
self._number_of_operations = number_of_operations
self._hash = hash.hex()
self._prev_hash = previous_block_hash.hex()
self._operations = operations
self._first_operation_idx = first_operation_idx
self._operation_id_to_enum = opertion_id_to_enum
@ -112,12 +108,6 @@ class BlockHiveDb(Block):
def get_previous_block_hash(self):
return self._prev_hash
def get_number_of_transactions(self):
return self._number_of_transactions
def get_number_of_operations(self):
return self._number_of_operations
def get_next_transaction(self):
if self._first_operation_idx is None:
return None

View File

@ -0,0 +1,39 @@
import logging
from hive.conf import SCHEMA_NAME
from hive.db.adapter import Db
log = logging.getLogger(__name__)
def prepare_app_context(db: Db) -> None:
log.info(f"Looking for '{SCHEMA_NAME}' context.")
ctx_present = db.query_one(f"SELECT hive.app_context_exists('{SCHEMA_NAME}') as ctx_present;")
if not ctx_present:
log.info(f"No application context present. Attempting to create a '{SCHEMA_NAME}' context...")
db.query_no_return(f"SELECT hive.app_create_context('{SCHEMA_NAME}');")
log.info("Application context creation done.")
def context_detach(db: Db) -> None:
is_attached = db.query_one(f"SELECT hive.app_context_is_attached('{SCHEMA_NAME}')")
if not is_attached:
log.info("No attached context - detach skipped.")
return
log.info("Trying to detach app context...")
db.query_no_return(f"SELECT hive.app_context_detach('{SCHEMA_NAME}')")
log.info("App context detaching done.")
def context_attach(db: Db, block_number: int) -> None:
is_attached = db.query_one(f"SELECT hive.app_context_is_attached('{SCHEMA_NAME}')")
if is_attached:
log.info("Context already attached - attaching skipped.")
return
log.info(f"Trying to attach app context with block number: {block_number}")
db.query_no_return(f"SELECT hive.app_context_attach('{SCHEMA_NAME}', {block_number})")
log.info("App context attaching done.")

View File

@ -1,22 +1,21 @@
from concurrent.futures import ThreadPoolExecutor
import logging
import queue
from typing import Final, Optional
from typing import Final, List, Optional
from sqlalchemy import text
from hive.conf import Conf
from hive.db.adapter import Db
from hive.indexer.block import BlocksProviderBase, OperationType, VirtualOperationType
from hive.indexer.hive_db.block import BlockHiveDb
from hive.indexer.hive_rpc.block_from_rest import BlockFromRpc
from hive.indexer.mock_block import ExtendedByMockBlockAdapter
from hive.indexer.mock_block_provider import MockBlockProvider
from hive.indexer.mock_vops_provider import MockVopsProvider
from hive.signals import can_continue_thread, set_exception_thrown
from hive.utils.stats import WaitingStatusManager as WSM
log = logging.getLogger(__name__)
OPERATIONS_QUERY: Final[str] = "SELECT * FROM enum_operations4hivemind(:first, :last)"
BLOCKS_QUERY: Final[str] = "SELECT * FROM enum_blocks4hivemind(:first, :last)"
NUMBER_OF_BLOCKS_QUERY: Final[str] = "SELECT num FROM hive_blocks ORDER BY num DESC LIMIT 1"
OPERATIONS_QUERY: Final[str] = "SELECT * FROM hivemind_app.enum_operations4hivemind(:first, :last)"
BLOCKS_QUERY: Final[str] = "SELECT * FROM hivemind_app.enum_blocks4hivemind(:first, :last)"
class BlocksDataFromDbProvider:
@ -27,49 +26,47 @@ class BlocksDataFromDbProvider:
sql_query: str,
db: Db,
blocks_per_request: int,
lbound: int,
ubound: int,
breaker, # hive.steem.signal.can_continue_thread
exception_reporter, # hive.steem.signal.set_exception_thrown
external_thread_pool: Optional[ThreadPoolExecutor] = None,
):
"""
lbound - block from which the processing starts
ubound - number of the last block to get
breaker - callable object which returns True while processing may continue
exception_reporter - callable invoked when an exception occurs in a thread
external_thread_pool - thread pool controlled outside the class
"""
assert breaker
assert blocks_per_request >= 1
self._breaker = breaker
self._exception_reporter = exception_reporter
self._lbound = lbound
self._ubound = ubound  # to include the upper bound in results
self._lbound = None
self._ubound = None
self._db = db
self._thread_pool = external_thread_pool if external_thread_pool else ThreadPoolExecutor(1)
self._blocks_per_request = blocks_per_request
self._sql_query = sql_query
def update_sync_block_range(self, lbound: int, ubound: int) -> None:
self._lbound = lbound
self._ubound = ubound
def thread_body_get_data(self, queue_for_data):
try:
for block in range(self._lbound, self._ubound, self._blocks_per_request):
if not self._breaker():
for block in range(self._lbound, self._ubound + 1, self._blocks_per_request):
if not can_continue_thread():
break
last = min([block + self._blocks_per_request - 1, self._ubound])
data_rows = self._db.query_all(
self._sql_query, first=block, last=min([block + self._blocks_per_request, self._ubound])
)
while self._breaker():
stmt = text(self._sql_query).bindparams(first=block, last=last)
data_rows = self._db.query_all(stmt, is_prepared=True)
if not data_rows:
log.warning(f'DATA ROWS ARE EMPTY! query: {stmt.compile(compile_kwargs={"literal_binds": True})}')
while can_continue_thread():
try:
queue_for_data.put(data_rows, True, 1)
break
except queue.Full:
continue
except:
self._exception_reporter()
set_exception_thrown()
raise
def start(self, queue_for_data):
@ -82,23 +79,14 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
_op_types_dictionary = {}
class Databases:
def __init__(self, conf):
self._db_root = Db(
conf.get('hived_database_url'), "MassiveBlocksProvider.Root", conf.get('log_explain_queries')
)
self._db_operations = Db(
conf.get('hived_database_url'), "MassiveBlocksProvider.OperationsData", conf.get('log_explain_queries')
)
self._db_blocks_data = Db(
conf.get('hived_database_url'), "MassiveBlocksProvider.BlocksData", conf.get('log_explain_queries')
)
def __init__(self, db_root: Db, shared: bool = False):
self._db_root = db_root
self._db_operations = db_root.clone('MassiveBlocksProvider_OperationsData') if not shared else None
self._db_blocks_data = db_root.clone('MassiveBlocksProvider_BlocksData') if not shared else None
assert self._db_root
assert self._db_operations
assert self._db_blocks_data
def close(self):
self._db_root.close()
def close_cloned_databases(self):
self._db_operations.close()
self._db_blocks_data.close()
@ -106,72 +94,50 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
return self._db_root
def get_operations(self):
return self._db_operations
return self._db_operations or self._db_root
def get_blocks_data(self):
return self._db_blocks_data
return self._db_blocks_data or self._db_root
def __init__(
self,
conf: Conf,
databases: Databases,
number_of_blocks_in_batch: int,
lbound: int,
ubound: int,
breaker, # hive.steem.signal.can_continue_thread
exception_reporter, # hive.steem.signal.set_exception_thrown
external_thread_pool: Optional[ThreadPoolExecutor] = None,
):
"""
databases - object Databases with opened databases
lbound - start blocks
ubound - last block
"""
assert lbound <= ubound
assert lbound >= 0
BlocksProviderBase.__init__(self, breaker, exception_reporter)
BlocksProviderBase.__init__(self)
self._conf = conf
self._databases = databases
self._db = databases.get_root()
self._lbound = lbound
self._ubound = ubound
self._blocks_per_query = number_of_blocks_in_batch
self._first_block_to_get = lbound
self._lbound = None
self._ubound = None
self._blocks_per_query = conf.get('max_batch')
self._blocks_queue = queue.Queue(maxsize=self._blocks_queue_size)
self._operations_queue = queue.Queue(maxsize=self._operations_queue_size)
self._blocks_data_queue = queue.Queue(maxsize=self._blocks_data_queue_size)
self._last_block_num_in_db = self._db.query_one(sql=NUMBER_OF_BLOCKS_QUERY)
assert self._last_block_num_in_db is not None
self._thread_pool = (
external_thread_pool if external_thread_pool else MassiveBlocksDataProviderHiveDb.create_thread_pool()
)
# read all blocks from the db; the remaining blocks (ubound - self._last_block_num_in_db) are supposed to be mocks
self._operations_provider = BlocksDataFromDbProvider(
sql_query=OPERATIONS_QUERY,
db=databases.get_operations(),
blocks_per_request=self._blocks_per_query,
lbound=self._lbound,
ubound=self._last_block_num_in_db + 1,
breaker=breaker,
exception_reporter=exception_reporter,
external_thread_pool=self._thread_pool,
)
self._blocks_data_provider = BlocksDataFromDbProvider(
sql_query=BLOCKS_QUERY,
db=databases.get_blocks_data(),
blocks_per_request=self._blocks_per_query,
lbound=self._lbound,
ubound=self._last_block_num_in_db + 1,
breaker=breaker,
exception_reporter=exception_reporter,
external_thread_pool=self._thread_pool,
)
if not MassiveBlocksDataProviderHiveDb._vop_types_dictionary:
virtual_operations_types_ids = self._db.query_all(
"SELECT id, name FROM hive_operation_types WHERE is_virtual = true"
"SELECT id, name FROM hive.operation_types WHERE is_virtual = true"
)
for id, name in virtual_operations_types_ids:
MassiveBlocksDataProviderHiveDb._vop_types_dictionary[id] = VirtualOperationType.from_name(
@ -180,13 +146,25 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
if not MassiveBlocksDataProviderHiveDb._op_types_dictionary:
operations_types_ids = self._db.query_all(
"SELECT id, name FROM hive_operation_types WHERE is_virtual = false"
"SELECT id, name FROM hive.operation_types WHERE is_virtual = false"
)
for id, name in operations_types_ids:
MassiveBlocksDataProviderHiveDb._op_types_dictionary[id] = OperationType.from_name(
name[len('hive::protocol::') :]
)
def update_sync_block_range(self, lbound: int, ubound: int) -> None:
assert lbound <= ubound
assert lbound >= 1
self._lbound = lbound
self._ubound = ubound
self._operations_provider.update_sync_block_range(lbound, ubound)
self._blocks_data_provider.update_sync_block_range(lbound, ubound)
def close_databases(self):
self._databases.close_cloned_databases()
@staticmethod
def _id_to_virtual_type(id_: int):
if id_ in MassiveBlocksDataProviderHiveDb._vop_types_dictionary:
@ -204,53 +182,13 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
return vop
return MassiveBlocksDataProviderHiveDb._id_to_operation_type(id_)
@staticmethod
def _get_mocked_block(block_num, always_create):
# Normally a mocked block should be created only when a block mock or vops were added,
# but when we ask for mock blocks past the database head we need to always return at
# least an empty block; otherwise the live sync streamer may hang waiting for new blocks
# instead of processing an already queued block (trailing block mechanism).
# That is the reason the 'always_create' parameter was added.
# NOTE: this only affects the situation when mocks are loaded; otherwise the mock
# provider methods do not return block data
vops = {}
MockVopsProvider.add_mock_vops(vops, block_num, block_num + 1)
block_mock = MockBlockProvider.get_block_data(block_num, bool(vops) or always_create)
if not block_mock:
return None
if vops:
vops = vops[block_num]['ops']
return BlockFromRpc(block_mock, vops)
def _get_mocks_after_db_blocks(self, first_mock_block_num):
for block_proposition in range(first_mock_block_num, self._ubound):
if not self._breaker():
return
mocked_block = self._get_mocked_block(block_proposition, True)
while self._breaker():
try:
self._blocks_queue.put(mocked_block, True, 1)
break
except queue.Full:
continue
def _thread_get_block(self):
try:
currently_received_block = 0
# only mocked blocks are possible
if self._lbound > self._last_block_num_in_db:
self._get_mocks_after_db_blocks(self._lbound)
return
while self._breaker():
blocks_data = self._get_from_queue(self._blocks_data_queue, 1)
while can_continue_thread():
blocks_data = self._get_from_queue(self._blocks_data_queue, 1) # batches of blocks (lists)
operations = self._get_from_queue(self._operations_queue, 1)
if not self._breaker():
if not can_continue_thread():
break
assert len(blocks_data) == 1, "Always one element should be returned"
@ -265,8 +203,6 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
block_data['date'],
block_data['hash'],
block_data['prev'],
block_data['tx_number'],
block_data['op_number'],
None,
None,
MassiveBlocksDataProviderHiveDb._operation_id_to_enum,
@ -280,8 +216,6 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
block_data['date'],
block_data['hash'],
block_data['prev'],
block_data['tx_number'],
block_data['op_number'],
operations,
idx,
MassiveBlocksDataProviderHiveDb._operation_id_to_enum,
@ -291,30 +225,16 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
if operations[block_operation_idx]['block_num'] > block_data['num']:
break
mocked_block = self._get_mocked_block(new_block.get_num(), False)
# live sync with mocks needs this, otherwise the stream will wait almost forever for a block
MockBlockProvider.set_last_real_block_num_date(
new_block.get_num(), new_block.get_date(), new_block.get_hash()
)
if mocked_block:
new_block = ExtendedByMockBlockAdapter(new_block, mocked_block)
while self._breaker():
while can_continue_thread():
try:
self._blocks_queue.put(new_block, True, 1)
currently_received_block += 1
if currently_received_block >= (self._ubound - 1):
if block_data['num'] >= self._ubound:
return
break
except queue.Full:
continue
# we reach last block in db, now only mocked blocks are possible
if new_block.get_num() >= self._last_block_num_in_db:
self._get_mocks_after_db_blocks(new_block.get_num() + 1)
return
except:
self._exception_reporter()
set_exception_thrown()
raise
@staticmethod
@ -336,15 +256,24 @@ class MassiveBlocksDataProviderHiveDb(BlocksProviderBase):
self._thread_pool.submit(self._thread_get_block),
] # futures
def get(self, number_of_blocks: int):
def start_without_threading(self):
self._blocks_data_provider.thread_body_get_data(queue_for_data=self._blocks_data_queue)
self._operations_provider.thread_body_get_data(queue_for_data=self._operations_queue)
self._thread_get_block()
def get(self, number_of_blocks: int) -> List[BlockHiveDb]:
"""Returns blocks and vops data for next number_of_blocks"""
log.info(f"blocks_data_queue.qsize: {self._blocks_data_queue.qsize()}")
log.info(f"operations_queue.qsize: {self._operations_queue.qsize()}")
log.info(f"blocks_queue.qsize: {self._blocks_queue.qsize()}")
blocks = []
wait_blocks_time = WSM.start()
if self._blocks_queue.qsize() < number_of_blocks and self._breaker():
if self._blocks_queue.qsize() < number_of_blocks and can_continue_thread():
log.info(f"Awaiting any blocks to process... {self._blocks_queue.qsize()}")
if not self._blocks_queue.empty() or self._breaker():
if not self._blocks_queue.empty() or can_continue_thread():
blocks = self._get_from_queue(self._blocks_queue, number_of_blocks)
WSM.wait_stat('block_consumer_block', WSM.stop(wait_blocks_time))
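With update_sync_block_range in place, the provider is constructed once and re-aimed at a new block range each sync round. A hedged usage sketch (root_db, conf, lbound and ubound are assumed from the caller; the single-threaded start shown above is used for simplicity):

databases = MassiveBlocksDataProviderHiveDb.Databases(db_root=root_db)
provider = MassiveBlocksDataProviderHiveDb(conf, databases)
provider.update_sync_block_range(lbound, ubound)  # must be set before fetching
provider.start_without_threading()                # fills the internal queues synchronously
blocks = provider.get(number_of_blocks=1000)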

View File

@ -1,86 +0,0 @@
import logging
from hive.indexer.block import Block, Operation, OperationType, Transaction, VirtualOperationType
log = logging.getLogger(__name__)
class VirtualOperationFromRpc(Operation):
def __init__(self, operation_name, operation_body):
self._operation_type = VirtualOperationType.from_name(operation_name)
self._operation_body = operation_body
def get_type(self):
return self._operation_type
def get_body(self):
return self._operation_body
class OperationFromRpc(Operation):
def __init__(self, operation_name, operation_body):
self._operation_type = OperationType.from_name(operation_name)
self._operation_body = operation_body
def get_type(self):
return self._operation_type
def get_body(self):
return self._operation_body
class TransactionFromRpc(Transaction):
def __init__(self, id, transaction):
self._id = id
self._transaction = transaction
def get_id(self):
return self._id
def get_next_operation(self):
for raw_operation in self._transaction['operations']:
operation = OperationFromRpc(raw_operation['type'], raw_operation['value'])
if not operation.get_type():
continue
yield operation
class BlockFromRpc(Block):
def __init__(self, block_data, virtual_ops):
"""
block_data - the block in raw (RPC) format
virtual_ops - list of virtual ops in the block
"""
self._blocks_data = block_data
self._virtual_ops = virtual_ops
def get_num(self):
return int(self._blocks_data['block_id'][:8], base=16)
def get_date(self):
return self._blocks_data['timestamp']
def get_hash(self):
return self._blocks_data['block_id']
def get_previous_block_hash(self):
return self._blocks_data['previous']
def get_number_of_transactions(self):
return len(self._blocks_data['transactions'])
def get_number_of_operations(self):
txs = self._blocks_data['transactions']
return sum([len(tx['operations']) for tx in txs])
def get_next_vop(self):
for vop in self._virtual_ops:
vop_object = VirtualOperationFromRpc(vop['type'], vop['value'])
if not vop_object.get_type():
continue
yield vop_object
def get_next_transaction(self):
for tx_idx, tx in enumerate(self._blocks_data['transactions']):
yield TransactionFromRpc(tx_idx, tx)

View File

@ -1,153 +0,0 @@
from concurrent.futures import ThreadPoolExecutor
import logging
import queue
from hive.indexer.mock_block_provider import MockBlockProvider
log = logging.getLogger(__name__)
class BlocksProvider:
"""Starts threads which request node for blocks, and collect responses to one queue"""
def __init__(
cls,
http_client,
number_of_threads,
blocks_per_request,
start_block,
max_block,
breaker,
exception_reporter,
external_thread_pool=None,
):
"""
http_client - object which will ask the node for blocks
number_of_threads - how many threads will be used to ask for blocks
start_block - block from which the processing starts
max_block - number of the last block to get
breaker - callable object which returns True while processing may continue
exception_reporter - callable invoked to report an undesired exception in a thread
external_thread_pool - thread pool controlled outside the class
"""
assert number_of_threads > 0
assert max_block > start_block
assert breaker
assert exception_reporter
assert http_client
assert blocks_per_request >= 1
cls._responses_queues = []
cls._breaker = breaker
cls._exception_reporter = exception_reporter
cls._start_block = start_block
cls._max_block = max_block  # to include the upper bound in results
cls._http_client = http_client
if external_thread_pool:
assert type(external_thread_pool) == ThreadPoolExecutor
cls._thread_pool = external_thread_pool
else:
cls._thread_pool = ThreadPoolExecutor(BlocksProvider.get_number_of_threads(number_of_threads))
cls._number_of_threads = number_of_threads
cls._blocks_per_request = blocks_per_request
# prepare queues and threads
for i in range(0, number_of_threads):
cls._responses_queues.append(queue.Queue(maxsize=50))
def get_number_of_threads(number_of_threads):
"""Return number of used thread if user want to collects blocks in some threads number
number_of_threads - how many threds will ask for blocks
"""
return number_of_threads + 1 # +1 because of a thread for collecting blocks from threads
def thread_body_get_block(cls, blocks_shift):
try:
for block in range(
cls._start_block + blocks_shift * cls._blocks_per_request,
cls._max_block,
cls._number_of_threads * cls._blocks_per_request,
):
if not cls._breaker():
return
results = []
number_of_expected_blocks = 1
query_param = [
{'block_num': i} for i in range(block, min([block + cls._blocks_per_request, cls._max_block]))
]
number_of_expected_blocks = len(query_param)
results = cls._http_client.exec('get_block', query_param, True)
if results:
while cls._breaker():
try:
cls._responses_queues[blocks_shift].put(results, True, 1)
break
except queue.Full:
continue
except:
cls._exception_reporter()
raise
def thread_body_blocks_collector(cls, queue_for_blocks):
try:
currently_received_block = cls._start_block - 1
while cls._breaker():
# take in order all blocks from threads queues
for blocks_queue in range(0, cls._number_of_threads):
if not cls._breaker():
return
while cls._breaker():
try:
blocks = cls._responses_queues[blocks_queue].get(True, 1)
cls._responses_queues[blocks_queue].task_done()
# split blocks range
for block in blocks:
if 'block' in block:
MockBlockProvider.set_last_real_block_num_date(
currently_received_block + 1,
block['block']['timestamp'],
block['block']['block_id'],
)
block_mock = MockBlockProvider.get_block_data(currently_received_block + 1, True)
if block_mock is not None:
if 'block' in block:
block["block"]["transactions"].extend(block_mock["transactions"])
else:
block["block"] = block_mock
log.warning(
f"Pure mock block: id {block_mock['block_id']}, previous {block_mock['previous']}"
)
block_for_queue = None if not 'block' in block else block['block']
while cls._breaker():
try:
queue_for_blocks.put(block_for_queue, True, 1)
currently_received_block += 1
if currently_received_block >= (cls._max_block - 1):
return
break
except queue.Full:
continue
break
except queue.Empty:
continue
except:
cls._exception_reporter()
raise
def start(cls, queue_for_blocks):
futures = []
for future_number in range(0, cls._number_of_threads):
future = cls._thread_pool.submit(cls.thread_body_get_block, future_number)
futures.append(future)
future = cls._thread_pool.submit(cls.thread_body_blocks_collector, queue_for_blocks)
futures.append(future)
return futures

View File

@ -1,124 +0,0 @@
from concurrent.futures import ThreadPoolExecutor
import logging
import queue
from hive.indexer.block import BlocksProviderBase
from hive.indexer.hive_rpc.block_from_rest import BlockFromRpc
from hive.indexer.hive_rpc.blocks_provider import BlocksProvider
from hive.indexer.hive_rpc.vops_provider import VopsProvider
from hive.utils.stats import WaitingStatusManager as WSM
log = logging.getLogger(__name__)
class MassiveBlocksDataProviderHiveRpc(BlocksProviderBase):
def __init__(
self,
conf,
node_client,
blocks_get_threads,
vops_get_threads,
number_of_blocks_data_in_one_batch,
lbound,
ubound,
breaker,
exception_reporter,
external_thread_pool=None,
):
"""
conf - configuration
node_client - SteemClient
blocks_get_threads - number of threads which get blocks from node
vops_get_threads - number of threads which get virtual operations from node
number_of_blocks_data_in_one_batch - number of blocks requested from the node in one HTTP call
lbound - first block to get
ubound - last block to get
breaker - callable, returns False when processing must be stopped
exception_reporter - callable invoked to report an undesired exception in a thread
external_thread_pool - thread pool controlled outside the class
"""
BlocksProviderBase.__init__(self, breaker, exception_reporter)
thread_pool = None
if external_thread_pool:
assert type(external_thread_pool) == ThreadPoolExecutor
thread_pool = external_thread_pool
else:
thread_pool = MassiveBlocksDataProviderHiveRpc.create_thread_pool(blocks_get_threads, vops_get_threads)
self.blocks_provider = BlocksProvider(
node_client._client["get_block"] if "get_block" in node_client._client else node_client._client["default"],
blocks_get_threads,
number_of_blocks_data_in_one_batch,
lbound,
ubound,
breaker,
exception_reporter,
thread_pool,
)
self.vops_provider = VopsProvider(
conf,
node_client,
vops_get_threads,
number_of_blocks_data_in_one_batch,
lbound,
ubound,
breaker,
exception_reporter,
thread_pool,
)
self.vops_queue = queue.Queue(maxsize=self._operations_queue_size)
self.blocks_queue = queue.Queue(maxsize=self._blocks_data_queue_size)
@staticmethod
def create_thread_pool(threads_for_blocks, threads_for_vops):
"""Creates initialzied thread pool with number of threads required by the provider.
You can pass the thread pool to provider during its creation to controll its lifetime
outside the provider"""
return ThreadPoolExecutor(
BlocksProvider.get_number_of_threads(threads_for_blocks)
+ VopsProvider.get_number_of_threads(threads_for_vops)
)
def get(self, number_of_blocks):
"""Returns blocks and vops data for next number_of_blocks"""
vops_and_blocks = {'vops': [], 'blocks': []}
log.info(f"vops_queue.qsize: {self.vops_queue.qsize()} blocks_queue.qsize: {self.blocks_queue.qsize()}")
wait_vops_time = WSM.start()
if self.vops_queue.qsize() < number_of_blocks and self._breaker():
log.info("Awaiting any vops to process...")
if not self.vops_queue.empty() or self._breaker():
vops = self._get_from_queue(self.vops_queue, number_of_blocks)
if self._breaker():
assert len(vops) == number_of_blocks
vops_and_blocks['vops'] = vops
WSM.wait_stat('block_consumer_vop', WSM.stop(wait_vops_time))
wait_blocks_time = WSM.start()
if (self.blocks_queue.qsize() < number_of_blocks) and self._breaker():
log.info("Awaiting any block to process...")
if not self.blocks_queue.empty() or self._breaker():
vops_and_blocks['blocks'] = self._get_from_queue(self.blocks_queue, number_of_blocks)
WSM.wait_stat('block_consumer_block', WSM.stop(wait_blocks_time))
result = []
for vop_nr in range(len(vops_and_blocks['blocks'])):
if vops_and_blocks['blocks'][vop_nr] is not None:
result.append(BlockFromRpc(vops_and_blocks['blocks'][vop_nr], vops_and_blocks['vops'][vop_nr]))
return result
def start(self):
futures = self.blocks_provider.start(self.blocks_queue)
futures.extend(self.vops_provider.start(self.vops_queue))
return futures

View File

@ -1,154 +0,0 @@
from concurrent.futures import ThreadPoolExecutor
import logging
import queue
log = logging.getLogger(__name__)
class VopsProvider:
"""Starts threads which request node for blocks, and collect responses to one queue"""
def __init__(
cls,
conf,
client,
number_of_threads,
blocks_per_request,
start_block,
max_block,
breaker,
exception_reporter,
external_thread_pool=None,
):
"""
conf - configuration
client - object which will ask the node for virtual operations
number_of_threads - how many threads will be used to ask for virtual operations
start_block - block from which the processing starts
max_block - number of the last block to get
breaker - callable object which returns True while processing may continue
exception_reporter - callable invoked to report an undesired exception in a thread
external_thread_pool - thread pool controlled outside the class
"""
assert conf
assert number_of_threads > 0
assert max_block > start_block
assert breaker
assert exception_reporter
assert client
assert blocks_per_request >= 1
cls._conf = conf
cls._responses_queues = []
cls._breaker = breaker
cls._exception_reporter = exception_reporter
cls._start_block = start_block
cls._max_block = max_block  # to include the upper bound in results
cls._client = client
if external_thread_pool:
assert type(external_thread_pool) == ThreadPoolExecutor
cls._thread_pool = external_thread_pool
else:
cls._thread_pool = ThreadPoolExecutor(VopsProvider.get_number_of_threads(number_of_threads))
cls._number_of_threads = number_of_threads
cls._blocks_per_request = blocks_per_request
cls.currently_received_block = cls._start_block - 1
# prepare queues and threads
for i in range(0, number_of_threads):
cls._responses_queues.append(queue.Queue(maxsize=50))
def get_number_of_threads(number_of_threads):
"""Return number of used thread if user want to collects virtual operations in some threads number
number_of_threads - how many threads will ask for vops
"""
return number_of_threads + 1  # +1 because of a thread for collecting vops from threads
@staticmethod
def get_virtual_operation_for_blocks(client, conf, start_block_num, number_of_blocks):
return client.enum_virtual_ops(conf, start_block_num, start_block_num + number_of_blocks)
def thread_body_get_block(cls, blocks_shift):
try:
for block in range(
cls._start_block + blocks_shift * cls._blocks_per_request,
cls._max_block + cls._blocks_per_request,
cls._number_of_threads * cls._blocks_per_request,
):
if not cls._breaker():
return
results = VopsProvider.get_virtual_operation_for_blocks(
cls._client, cls._conf, block, cls._blocks_per_request
)
while cls._breaker():
try:
cls._responses_queues[blocks_shift].put(results, True, 1)
break
except queue.Full:
continue
except:
cls._exception_reporter()
raise
def _fill_queue_with_no_vops(cls, queue_for_vops, number_of_no_vops):
for vop in range(0, number_of_no_vops):
while cls._breaker():
try:
queue_for_vops.put([], True, 1)
cls.currently_received_block += 1
if cls.currently_received_block >= (cls._max_block - 1):
return True
break
except queue.Full:
continue
return False
def thread_body_blocks_collector(cls, queue_for_vops):
try:
while cls._breaker():
# take in order all vops from threads queues
for vops_queue in range(0, cls._number_of_threads):
if not cls._breaker():
return
while cls._breaker():
try:
vops = cls._responses_queues[vops_queue].get(True, 1)
cls._responses_queues[vops_queue].task_done()
# split blocks range
if not vops:
if cls._fill_queue_with_no_vops(queue_for_vops, cls._blocks_per_request):
return
else:
for block in vops:
if cls._fill_queue_with_no_vops(
queue_for_vops, block - (cls.currently_received_block + 1)
):
return
vop = vops[block]
while cls._breaker():
try:
queue_for_vops.put(vop['ops'], True, 1)
cls.currently_received_block += 1
if cls.currently_received_block >= (cls._max_block - 1):
return
break
except queue.Full:
continue
break
except queue.Empty:
continue
except:
cls._exception_reporter()
raise
def start(cls, queue_for_vops):
futures = []
for future_number in range(0, cls._number_of_threads):
future = cls._thread_pool.submit(cls.thread_body_get_block, future_number)
futures.append(future)
future = cls._thread_pool.submit(cls.thread_body_blocks_collector, queue_for_vops)
futures.append(future)
return futures

View File

@ -1,40 +0,0 @@
from hive.indexer.block import Block
class ExtendedByMockBlockAdapter(Block):
def __init__(self, block, extended_block):
assert issubclass(type(block), Block)
assert issubclass(type(extended_block), Block)
self._wrapped_block = block
self._extended_block = extended_block
def get_num(self):
return self._wrapped_block.get_num()
def get_next_vop(self):
for vop in self._wrapped_block.get_next_vop():
yield vop
for vop in self._extended_block.get_next_vop():
yield vop
def get_date(self):
return self._wrapped_block.get_date()
def get_hash(self):
return self._wrapped_block.get_hash()
def get_previous_block_hash(self):
return self._wrapped_block.get_previous_block_hash()
def get_number_of_transactions(self):
return self._wrapped_block.get_number_of_transactions() + self._extended_block.get_number_of_transactions()
def get_number_of_operations(self):
return self._wrapped_block.get_number_of_operations() + self._extended_block.get_number_of_operations()
def get_next_transaction(self):
for transaction in self._wrapped_block.get_next_transaction():
yield transaction
for transaction in self._extended_block.get_next_transaction():
yield transaction

View File

@ -1,72 +0,0 @@
""" Data provider for test vops """
from hive.indexer.mock_data_provider import MockDataProvider
class MockVopsProvider(MockDataProvider):
"""Data provider for test vops"""
block_data = {'ops': {}, 'ops_by_block': {}}
@classmethod
def add_block_data_from_file(cls, file_name):
from json import load
data = {}
with open(file_name, "r") as src:
data = load(src)
cls.add_block_data(data)
@classmethod
def add_block_data(cls, data):
if 'ops' in data:
for op in data['ops']:
if 'ops' in cls.block_data and op['block'] in cls.block_data['ops']:
cls.block_data['ops'][op['block']].append(op)
else:
cls.block_data['ops'][op['block']] = [op]
if 'ops_by_block' in data:
for ops in data['ops_by_block']:
if 'ops_by_block' in cls.block_data and ops['block'] in cls.block_data['ops_by_block']:
cls.block_data['ops_by_block'][ops['block']].extend(ops['ops'])
else:
cls.block_data['ops_by_block'][ops['block']] = ops
@classmethod
def get_block_data(cls, block_num):
ret = {}
if 'ops' in cls.block_data and block_num in cls.block_data['ops']:
data = cls.block_data['ops'][block_num]
if data:
if 'ops' in ret:
ret['ops'].extend([op['op'] for op in data])
else:
ret['ops'] = [op['op'] for op in data]
if 'ops_by_block' in cls.block_data and block_num in cls.block_data['ops_by_block']:
data = cls.block_data['ops_by_block'][block_num]
if data:
if 'ops_by_block' in ret:
ret['ops_by_block'].extend([ops['op'] for ops in data['ops']])
else:
ret['ops_by_block'] = [ops['op'] for ops in data['ops']]
return ret
@classmethod
def add_mock_vops(cls, ret, from_block, end_block):
# don't do anything when there is no block data
if not cls.block_data['ops_by_block'] and not cls.block_data['ops']:
return
for block_num in range(from_block, end_block):
mock_vops = cls.get_block_data(block_num)
if mock_vops:
if block_num in ret:
if 'ops_by_block' in mock_vops:
ret[block_num]['ops'].extend(mock_vops['ops_by_block'])
if 'ops' in mock_vops:
ret[block_num]['ops'].extend(mock_vops['ops'])
else:
if 'ops' in mock_vops:
ret[block_num] = {"ops": mock_vops['ops']}
if 'ops_by_block' in mock_vops:
ret[block_num] = {"ops": mock_vops['ops_by_block']}

Some files were not shown because too many files have changed in this diff