mirror of https://git.joinsharkey.org/Sharkey/Sharkey.git
synced 2024-11-25 20:03:08 +02:00
Compare commits
No commits in common. "fb455e4fd9fc086203b95b677114ce6a49898743" and "b77c025245a2ba4c425f88a0a2f7c1e84cf21964" have entirely different histories.
fb455e4fd9 ... b77c025245
5 changed files with 20 additions and 350 deletions
.config/ci.yml (216 lines removed)

@@ -1,216 +0,0 @@
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Misskey configuration
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

#   ┌─────┐
#───┘ URL └─────────────────────────────────────────────────────

# Final accessible URL seen by a user.
url: https://example.tld/

# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# URL SETTINGS AFTER THAT!

#   ┌───────────────────────┐
#───┘ Port and TLS settings └───────────────────────────────────

#
# Misskey requires a reverse proxy to support HTTPS connections.
#
#                 +----- https://example.tld/ ------------+
#   +------+      |+-------------+      +----------------+|
#   | User | ---> || Proxy (443) | ---> | Misskey (3000) ||
#   +------+      |+-------------+      +----------------+|
#                 +---------------------------------------+
#
# You need to set up a reverse proxy. (e.g. nginx)
# An encrypted connection with HTTPS is highly recommended
# because tokens may be transferred in GET requests.

# The port that your Misskey server should listen on.
port: 3000

#   ┌──────────────────────────┐
#───┘ PostgreSQL configuration └────────────────────────────────

db:
  host: postgres
  port: 5432

  # Database name
  db: postgres

  # Auth
  user: postgres
  pass: ci

  # Whether disable Caching queries
  #disableCache: true

  # Extra Connection options
  #extra:
  #  ssl: true

dbReplications: false

# You can configure any number of replicas here
#dbSlaves:
#  -
#    host:
#    port:
#    db:
#    user:
#    pass:
#  -
#    host:
#    port:
#    db:
#    user:
#    pass:

#   ┌─────────────────────┐
#───┘ Redis configuration └─────────────────────────────────────

redis:
  host: redis
  port: 6379
  #family: 0  # 0=Both, 4=IPv4, 6=IPv6
  #pass: example-pass
  #prefix: example-prefix
  #db: 1

#redisForPubsub:
#  host: redis
#  port: 6379
#  #family: 0  # 0=Both, 4=IPv4, 6=IPv6
#  #pass: example-pass
#  #prefix: example-prefix
#  #db: 1

#redisForJobQueue:
#  host: redis
#  port: 6379
#  #family: 0  # 0=Both, 4=IPv4, 6=IPv6
#  #pass: example-pass
#  #prefix: example-prefix
#  #db: 1

#redisForTimelines:
#  host: redis
#  port: 6379
#  #family: 0  # 0=Both, 4=IPv4, 6=IPv6
#  #pass: example-pass
#  #prefix: example-prefix
#  #db: 1

#   ┌───────────────────────────┐
#───┘ MeiliSearch configuration └─────────────────────────────

# You can set scope to local (default value) or global
# (include notes from remote).

#meilisearch:
#  host: meilisearch
#  port: 7700
#  apiKey: ''
#  ssl: true
#  index: ''
#  scope: global

#   ┌───────────────┐
#───┘ ID generation └───────────────────────────────────────────

# You can select the ID generation method.
# You don't usually need to change this setting, but you can
# change it according to your preferences.

# Available methods:
# aid ... Short, Millisecond accuracy
# aidx ... Millisecond accuracy
# meid ... Similar to ObjectID, Millisecond accuracy
# ulid ... Millisecond accuracy
# objectid ... This is left for backward compatibility

# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# ID SETTINGS AFTER THAT!

id: 'aidx'

#   ┌─────────────────────┐
#───┘ Other configuration └─────────────────────────────────────

# Whether disable HSTS
#disableHsts: true

# Number of worker processes
#clusterLimit: 1

# Job concurrency per worker
# deliverJobConcurrency: 128
# inboxJobConcurrency: 16
# relashionshipJobConcurrency: 16
#  What's relashionshipJob?:
#   Follow, unfollow, block and unblock(ings) while following-imports, etc. or account migrations.

# Job rate limiter
# deliverJobPerSec: 128
# inboxJobPerSec: 32
# relashionshipJobPerSec: 64

# Job attempts
# deliverJobMaxAttempts: 12
# inboxJobMaxAttempts: 8

# Local address used for outgoing requests
#outgoingAddress: 127.0.0.1

# IP address family used for outgoing request (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4

# Amount of characters that can be used when writing notes (maximum: 8192, minimum: 1)
maxNoteLength: 3000

# Proxy for HTTP/HTTPS
#proxy: http://127.0.0.1:3128

proxyBypassHosts:
  - api.deepl.com
  - api-free.deepl.com
  - www.recaptcha.net
  - hcaptcha.com
  - challenges.cloudflare.com

# Proxy for SMTP/SMTPS
#proxySmtp: http://127.0.0.1:3128   # use HTTP/1.1 CONNECT
#proxySmtp: socks4://127.0.0.1:1080 # use SOCKS4
#proxySmtp: socks5://127.0.0.1:1080 # use SOCKS5

# Media Proxy
#mediaProxy: https://example.com/proxy

# Proxy remote files (default: true)
# Proxy remote files by this instance or mediaProxy to prevent remote files from running in remote domains.
proxyRemoteFiles: true

# Movie Thumbnail Generation URL
# There is no reference implementation.
# For example, Misskey will point to the following URL:
#   https://example.com/thumbnail.webp?thumbnail=1&url=https%3A%2F%2Fstorage.example.com%2Fpath%2Fto%2Fvideo.mp4
#videoThumbnailGenerator: https://example.com

# Sign to ActivityPub GET request (default: true)
signToActivityPubGet: true
# check that inbound ActivityPub GET requests are signed ("authorized fetch")
checkActivityPubGetSignature: false

# For security reasons, uploading attachments from the intranet is prohibited,
# but exceptions can be made from the following settings. Default value is "undefined".
# Read changelog to learn more (Improvements of 12.90.0 (2021/09/04)).
#allowedPrivateNetworks: [
#  '127.0.0.1/32'
#]

#customMOTD: ['Hello World', 'The sharks rule all', 'Shonks']

# Upload or download file size limits (bytes)
#maxFileSize: 262144000
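For context: the CI pipeline below copies this file to .config/default.yml before building. As a rough illustration only (not Sharkey's actual config loader), a minimal TypeScript sketch of reading such a file with js-yaml could look like the following; the Config type, file path, and validation are assumptions, while the field names mirror the YAML above.

// Minimal sketch (assumption): reading a Misskey/Sharkey-style config with js-yaml.
// This is not the project's real loader; only the field names follow the YAML above.
import * as fs from 'node:fs';
import { load } from 'js-yaml';

type Config = {
	url: string;
	port: number;
	db: { host: string; port: number; db: string; user: string; pass: string };
	redis: { host: string; port: number };
	id: string;
	maxNoteLength?: number;
};

const raw = fs.readFileSync('.config/default.yml', 'utf-8');
const config = load(raw) as Config;

// url and id must not change after the first start, so fail early if they are missing.
if (!config.url || !config.port) {
	throw new Error('url and port are required before first start');
}
console.log(`listening on :${config.port} for ${config.url}`);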
.gitlab-ci.yml (108 lines removed)

@@ -1,108 +0,0 @@
stages:
  - test
  - deploy

testCommit:
  stage: test
  image: node:latest
  services:
    - postgres:15
    - redis
  variables:
    POSTGRES_PASSWORD: ci
  script:
    - apt-get update && apt-get install -y git wget curl build-essential python3
    - cp .config/ci.yml .config/default.yml
    - corepack enable
    - corepack prepare pnpm@latest --activate
    - git submodule update --init
    - pnpm install --frozen-lockfile
    - pnpm run build
    - pnpm run migrate
  cache:
    key: test
    policy: pull-push
    when: on_success
    paths:
      - node_modules/
      - packages/*/node_modules/
  only:
    - develop
    - merge_requests
    - stable

getImageTag:
  stage: deploy
  image: ubuntu:latest
  script:
    - apt-get update && apt-get install -y jq
    - |
      if test -n "$CI_COMMIT_TAG"; then
        tag="$CI_COMMIT_TAG"
      elif test "$CI_COMMIT_BRANCH" == "stable"; then
        tag="latest"
      elif test "$CI_COMMIT_BRANCH" == "develop"; then
        tag="develop"
      else
        tag="$CI_COMMIT_BRANCH"
      fi
      version=$(cat package.json | jq -r '.version')
    - echo "REGISTRY_PUSH_TAG=$tag" >> build.env
    - echo "REGISTRY_PUSH_VERSION=$version" >> build.env
  artifacts:
    reports:
      dotenv: build.env
  only:
    - stable
    - develop
    - tags
buildDocker:
  stage: deploy
  needs:
    - job: getImageTag
      artifacts: true
  parallel:
    matrix:
      - ARCH: amd64
      - ARCH: arm64
  tags:
    - ${ARCH}
  image:
    name: gcr.io/kaniko-project/executor:debug
    entrypoint: [""]
  script:
    - >-
      /kaniko/executor
      --context "${CI_PROJECT_DIR}"
      --dockerfile "${CI_PROJECT_DIR}/Dockerfile"
      --destination "${CI_REGISTRY_IMAGE}:${REGISTRY_PUSH_VERSION}-${ARCH}"
  only:
    - stable
    - develop
    - tags
mergeManifests:
  stage: deploy
  needs:
    - job: buildDocker
      artifacts: false
    - job: getImageTag
      artifacts: true
  tags:
    - docker
  image:
    name: mplatform/manifest-tool:alpine
    entrypoint: [""]
  script:
    - >-
      manifest-tool
      --username=${CI_REGISTRY_USER}
      --password=${CI_REGISTRY_PASSWORD}
      push from-args
      --platforms linux/amd64,linux/arm64
      --tags ${REGISTRY_PUSH_VERSION}
      --template ${CI_REGISTRY_IMAGE}:${REGISTRY_PUSH_VERSION}-ARCH
      --target ${CI_REGISTRY_IMAGE}:${REGISTRY_PUSH_TAG}
  only:
    - stable
    - develop
    - tags
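In the removed pipeline, getImageTag maps the commit ref to a registry tag and exports it (plus the package.json version) through a dotenv artifact that buildDocker and mergeManifests consume. Purely for illustration, the same mapping expressed as a small TypeScript function; the function name and console output are assumptions, the job itself uses shell and jq.

// Illustrative sketch only: the tag selection done by the getImageTag job above.
import { readFileSync } from 'node:fs';

function resolveImageTag(commitTag: string | undefined, branch: string | undefined): string {
	if (commitTag) return commitTag;          // tagged release uses the tag name
	if (branch === 'stable') return 'latest'; // stable branch publishes :latest
	if (branch === 'develop') return 'develop';
	return branch ?? 'unknown';               // any other branch uses its own name
}

const version: string = JSON.parse(readFileSync('package.json', 'utf-8')).version;
console.log(`REGISTRY_PUSH_TAG=${resolveImageTag(process.env.CI_COMMIT_TAG, process.env.CI_COMMIT_BRANCH)}`);
console.log(`REGISTRY_PUSH_VERSION=${version}`);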
@@ -21,6 +21,8 @@
 <a href="https://ko-fi.com/transfem">
 <img src="https://custom-icon-badges.herokuapp.com/badge/donate-F96854?logoColor=F96854&style=for-the-badge&logo=kofi&labelColor=363B40" alt="donate"/></a>
+<a href="https://hosted.weblate.org/projects/sharkey/">
+<img src="https://custom-icon-badges.herokuapp.com/badge/translate-sharkey-124437?logoColor=acea31&style=for-the-badge&logo=translate-sharkey&labelColor=363B40" alt="Translate Sharkey"/></a>

 ---

@@ -2,7 +2,7 @@ version: "3"

 services:
   web:
-#    image: registry.activitypub.software/transfem-org/sharkey:latest
+#    image: git.joinsharkey.org/sharkey/sharkey:latest
     build: .
     restart: always
     links:
@@ -130,17 +130,6 @@ export class ImportNotesProcessorService {
 		return typeof obj[Symbol.iterator] === 'function';
 	}
-
-	private parseTwitterFile(str : string) : null | [{ tweet: any }] {
-		const removed = str.replace(new RegExp('window\\.YTD\\.tweets\\.part0 = ', 'g'), '');
-
-		try {
-			return JSON.parse(removed);
-		} catch (error) {
-			//The format is not what we expected. Either this file was tampered with or twitters exports changed
-			return null;
-		}
-	}

 	@bindThis
 	public async process(job: Bull.Job<DbNoteImportJobData>): Promise<void> {
 		this.logger.info(`Starting note import of ${job.data.user.id} ...`);
@@ -186,20 +175,23 @@ export class ImportNotesProcessorService {
 			try {
 				this.logger.succ(`Unzipping to ${outputPath}`);
 				ZipReader.withDestinationPath(outputPath).viaBuffer(await fs.promises.readFile(destPath));
-
-				const unprocessedTweetJson = this.parseTwitterFile(fs.readFileSync(outputPath + '/data/tweets.js', 'utf-8'));
-
-				//Make sure that it isnt null (because if something went wrong in parseTwitterFile it returns null)
-				if (unprocessedTweetJson) {
-					const tweets = Object.keys(unprocessedTweetJson).reduce((m, key, i, obj) => {
-						return m.concat(unprocessedTweetJson[i].tweet);
+				const fakeWindow: any = {
+					window: {
+						YTD: {
+							tweets: {
+								part0: {},
+							},
+						},
+					},
+				};
+				const script = new vm.Script(fs.readFileSync(outputPath + '/data/tweets.js', 'utf-8'));
+				const context = vm.createContext(fakeWindow);
+				script.runInContext(context);
+				const tweets = Object.keys(fakeWindow.window.YTD.tweets.part0).reduce((m, key, i, obj) => {
+					return m.concat(fakeWindow.window.YTD.tweets.part0[key].tweet);
 				}, []);
-
 				const processedTweets = await this.recreateChain(['id_str'], ['in_reply_to_status_id_str'], tweets, false);
 				this.queueService.createImportTweetsToDbJob(job.data.user, processedTweets, null);
-				} else {
-					this.logger.warn('Failed to import twitter notes due to malformed file');
-				}
 			} finally {
 				cleanup();
 			}
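The added lines replace the removed parseTwitterFile helper: instead of stripping the "window.YTD.tweets.part0 = " prefix with a regex and calling JSON.parse, the new code executes tweets.js inside a Node vm context seeded with a fake window object and reads the assigned data back. Below is a standalone sketch of that technique, assuming a Twitter/X archive layout where data/tweets.js assigns an array of { tweet } wrappers to window.YTD.tweets.part0; the helper name, return type, and usage line are illustrative, not the exact Sharkey code.

// Illustrative sketch of the vm-based approach used in the diff above.
import * as fs from 'node:fs';
import * as vm from 'node:vm';

export function readTweetsJs(path: string): any[] {
	// Seed a fake `window` so that `window.YTD.tweets.part0 = [...]` has somewhere to land.
	const fakeWindow: any = {
		window: { YTD: { tweets: { part0: {} } } },
	};
	const script = new vm.Script(fs.readFileSync(path, 'utf-8'));
	const context = vm.createContext(fakeWindow);
	script.runInContext(context);
	// Collect the wrapped tweet objects into a flat array.
	return Object.values(fakeWindow.window.YTD.tweets.part0).map((entry: any) => entry.tweet);
}

// Hypothetical usage: const tweets = readTweetsJs(outputPath + '/data/tweets.js');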