Mirror of https://github.com/spacebarchat/server.git (synced 2024-11-06 10:52:31 +01:00)

Commit 7f1bda9492: Merge branch 'master' into translation
@@ -1,2 +1,2 @@
node_modules/
db/
.github/FUNDING.yml (vendored, 2 changed lines)
@@ -1 +1 @@
open_collective: fosscord
.github/workflows/docker-publish-api.yml (vendored, 78 changed lines)
@@ -1,47 +1,41 @@
name: docker-publish-api

on:
  push:
    paths:
-      - 'api/**'
+      - "api/**"

jobs:
  docker-api:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache/api
          key: ${{ runner.os }}-buildx-api-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-api-
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: ./api
          push: true
          tags: ${{ secrets.DOCKERHUB_TAGS_API }}
          cache-from: type=local,src=/tmp/.buildx-cache/api
          cache-to: type=local,dest=/tmp/.buildx-cache-new/api
      - # Hackfix to cleanup cache; replace after buildx 0.6 and BuildKit 0.9 are GA
        # https://github.com/docker/build-push-action/pull/406#issuecomment-879184394
        name: Move cache fix
        run: |
          rm -rf /tmp/.buildx-cache/api
          mv /tmp/.buildx-cache-new/api /tmp/.buildx-cache/api
.github/workflows/docker-publish-cdn.yml (vendored, 78 changed lines)
@@ -1,47 +1,41 @@
name: docker-publish-cdn

on:
  push:
    paths:
-      - 'cdn/**'
+      - "cdn/**"

jobs:
  docker-cdn:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache/cdn
          key: ${{ runner.os }}-buildx-cdn-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-cdn-
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: ./cdn
          push: true
          tags: ${{ secrets.DOCKERHUB_TAGS_CDN }}
          cache-from: type=local,src=/tmp/.buildx-cache/cdn
          cache-to: type=local,dest=/tmp/.buildx-cache-new/cdn
      - # Hackfix to cleanup cache; replace after buildx 0.6 and BuildKit 0.9 are GA
        # https://github.com/docker/build-push-action/pull/406#issuecomment-879184394
        name: Move cache fix
        run: |
          rm -rf /tmp/.buildx-cache/cdn
          mv /tmp/.buildx-cache-new/cdn /tmp/.buildx-cache/cdn
.github/workflows/docker-publish-gateway.yml (vendored, 78 changed lines)
@@ -1,47 +1,41 @@
name: docker-publish-gw

on:
  push:
    paths:
-      - 'gateway/**'
+      - "gateway/**"

jobs:
  docker-gw:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache/gw
          key: ${{ runner.os }}-buildx-gw-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-gw-
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: ./gateway
          push: true
          tags: ${{ secrets.DOCKERHUB_TAGS_GW }}
          cache-from: type=local,src=/tmp/.buildx-cache/gw
          cache-to: type=local,dest=/tmp/.buildx-cache-new/gw
      - # Hackfix to cleanup cache; replace after buildx 0.6 and BuildKit 0.9 are GA
        # https://github.com/docker/build-push-action/pull/406#issuecomment-879184394
        name: Move cache fix
        run: |
          rm -rf /tmp/.buildx-cache/gw
          mv /tmp/.buildx-cache-new/gw /tmp/.buildx-cache/gw
.github/workflows/release.yml (vendored, 182 changed lines)
@@ -1,101 +1,101 @@
on:
  workflow_dispatch:
  push:
    # Sequence of patterns matched against refs/tags
    tags:
      - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10

name: Publish Release

jobs:
  insiders-build:
    strategy:
      matrix:
        os: [windows, macos, ubuntu]
        include:
          - os: windows
            file: fosscord-server-windows.exe
            package: ""
            artifact: fosscord-server-windows.exe
          - os: macos
            file: fosscord-server.app
            package: tar -czf 'fosscord-server-macos.app.tgz' 'fosscord-server.app'
            artifact: fosscord-server-macos.app.tgz
          - os: ubuntu
            file: fosscord
            package: chmod +x fosscord && tar -czf 'fosscord-server-linux.tgz' 'fosscord'
            artifact: fosscord-server-linux.tgz
    runs-on: ${{ matrix.os }}-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        env:
          MONGOMS_VERSION: 4.4.3
        with:
          node-version: 14
      - run: |
          cd bundle
          npm run setup
          npx caxa -i . -m 'This_may_take_a_while_to_run_the_first_time_please_wait...' --output '${{matrix.file}}' -- '{{caxa}}/node_modules/.bin/node' '{{caxa}}/dist/bundle/src/start.js'
          ${{ matrix.package }}
      - uses: actions/upload-artifact@v2
        with:
          name: ${{ matrix.artifact }}
          path: bundle/${{ matrix.artifact }}

  release:
    needs: [insiders-build]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/download-artifact@v2
        with:
          name: fosscord-server-windows.exe
      - uses: actions/download-artifact@v2
        with:
          name: fosscord-server-macos.app.tgz
      - uses: actions/download-artifact@v2
        with:
          name: fosscord-server-linux.tgz
      - uses: actions/create-release@v1
        id: create-release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: v${{ secrets.RELEASE_VERSION }}
          release_name: Server v${{ secrets.RELEASE_VERSION }}
          draft: false
          prerelease: true # TODO: change this to false
          body: >
            ## Download

            - [Windows](https://github.com/fosscord/fosscord-server/releases/download/v${{ secrets.RELEASE_VERSION }}/fosscord-server-windows.exe)

            - [MacOS](https://github.com/fosscord/fosscord-server/releases/download/v${{ secrets.RELEASE_VERSION }}/fosscord-server-macos.app.tgz)

            - [Linux](https://github.com/fosscord/fosscord-server/releases/download/v${{ secrets.RELEASE_VERSION }}/fosscord-server-linux.tgz)
      - uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: fosscord-server-windows.exe
          asset_name: fosscord-server-windows.exe
          asset_content_type: application/vnd.microsoft.portable-executable
      - uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: fosscord-server-macos.app.tgz
          asset_name: fosscord-server-macos.app.tgz
          asset_content_type: application/gzip
      - uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: fosscord-server-linux.tgz
          asset_name: fosscord-server-linux.tgz
          asset_content_type: application/gzip
.gitignore (vendored, 2 changed lines)
@@ -8,4 +8,4 @@ database.db
tsconfig.tsbuildinfo
files/
.env
config.json
.prettierrc (new file, 4 lines)
@@ -0,0 +1,4 @@
{
	"tabWidth": 4,
	"useTabs": true
}
@@ -1,4 +1,4 @@
-FROM nikolaik/python-nodejs:latest
+FROM node:14
 WORKDIR /usr/src/fosscord-server/
 COPY . .
 WORKDIR /usr/src/fosscord-server/bundle
README.md (16 changed lines)
@@ -18,13 +18,13 @@

This repository contains:

-- [Fosscord HTTP API Server](https://github.com/fosscord/fosscord-server/tree/master/api)
-- [WebSocket Gateway Server](https://github.com/fosscord/fosscord-server/tree/master/gateway)
-- [HTTP CDN Server](https://github.com/fosscord/fosscord-server/tree/master/cdn)
-- [Utility and Database Models](https://github.com/fosscord/fosscord-server/tree/master/util)
-- [RTC Server](https://github.com/fosscord/fosscord-server/tree/master/rtc)
-- [WebRTC Server](https://github.com/fosscord/fosscord-server/tree/master/webrtc)
-- [Admin Dashboard](https://github.com/fosscord/fosscord-server/tree/master/dashboard)
+- [Fosscord HTTP API Server](/api)
+- [WebSocket Gateway Server](/gateway)
+- [HTTP CDN Server](/cdn)
+- [Utility and Database Models](/util)
+- [RTC Server](/rtc)
+- [WebRTC Server](/webrtc)
+- [Admin Dashboard](/dashboard)

## [Resources](https://docs.fosscord.com/resources/)

@@ -32,4 +32,4 @@ This repository contains:

## [Setup](https://docs.fosscord.com/setup/server/)

- [Download](https://github.com/fosscord/fosscord-server/releases)
api/.vscode/api-snippets.code-snippets (vendored, 10 changed lines)
@@ -19,11 +19,7 @@
	"Route": {
		"scope": "typescript",
		"prefix": "route",
-		"body": [
-			"router.get(\"$1\", route({}), (req: Request, res: Response) => {",
-			"\t$2",
-			"});"
-		],
+		"body": ["router.get(\"$1\", route({}), (req: Request, res: Response) => {", "\t$2", "});"],
		"description": "An API endpoint"
	}
}
api/assets/schemas.json (17844 changed lines)
File diff suppressed because it is too large.
@@ -105,8 +105,40 @@
	});
}

const supportedLocales = [
	"bg", "cs", "da", "de", "el", "en-GB", "es-ES", "fi", "fr", "hi",
	"hr", "hu", "it", "ja", "ko", "lt", "nl", "no", "pl", "pt-BR",
	"ro", "ru", "sv-SE", "th", "tr", "uk", "vi", "zh-CN", "zh-TW"
];

const settings = JSON.parse(localStorage.getItem("UserSettingsStore"));
-if (settings && settings.locale.length <= 2) {
+if (settings && !supportedLocales.includes(settings.locale)) {
	// fix client locale wrong and client not loading at all
	settings.locale = "en-US";
	localStorage.setItem("UserSettingsStore", JSON.stringify(settings));
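The new guard above is behaviourally different from the one it replaces: the old `settings.locale.length <= 2` check reset every two-letter locale, including valid ones such as "de" or "tr", while the new check only resets locales missing from `supportedLocales`. A minimal sketch of the new behaviour, with hypothetical stored values:

```
const supportedLocales = ["de", "en-GB", "tr"]; // abbreviated list from above

function normalizeLocale(stored: string): string {
	// Only unsupported locales are replaced; supported short codes survive.
	return supportedLocales.includes(stored) ? stored : "en-US";
}

normalizeLocale("de");    // "de"    (the old length check would have reset this)
normalizeLocale("xx-YY"); // "en-US" (unknown locale, reset so the client can load)
```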
@@ -1,3 +1,3 @@
files:
  - source: /locales/en/*.json
    translation: /locales/%two_letters_code%/%original_file_name%
@@ -1,18 +1,18 @@
{
	"field": {
-		"BASE_TYPE_REQUIRED": "This field is required",
-		"BASE_TYPE_STRING": "This field must be a string",
-		"BASE_TYPE_NUMBER": "This field must be a number",
-		"BASE_TYPE_BIGINT": "This field must be a bigint",
-		"BASE_TYPE_BOOLEAN": "This field must be a boolean",
-		"BASE_TYPE_CHOICES": "This field must be one of ({{types}})",
-		"BASE_TYPE_CLASS": "This field must be an instance of {{type}}",
-		"BASE_TYPE_OBJECT": "This field must be an object",
-		"BASE_TYPE_ARRAY": "This field must be an array",
-		"UNKOWN_FIELD": "Unknown key: {{key}}",
-		"BASE_TYPE_CONSTANT": "This field must be {{value}}",
-		"EMAIL_TYPE_INVALID_EMAIL": "Not a well-formed email address",
-		"DATE_TYPE_PARSE": "Could not parse {{date}}. Should be ISO8601",
-		"BASE_TYPE_BAD_LENGTH": "Must be between {{length}} in length"
+		"BASE_TYPE_REQUIRED": "Dieses Feld ist erforderlich",
+		"BASE_TYPE_STRING": "Dieses Feld muss ein String sein",
+		"BASE_TYPE_NUMBER": "Dieses Feld muss eine Zahl sein",
+		"BASE_TYPE_BIGINT": "Dieses Feld muss ein bigint sein",
+		"BASE_TYPE_BOOLEAN": "Dieses Feld muss ein boolean sein",
+		"BASE_TYPE_CHOICES": "Dieses Feld muss eines von ({{types}}) sein",
+		"BASE_TYPE_CLASS": "Dieses Feld muss {{type}} sein",
+		"BASE_TYPE_OBJECT": "Dieses Feld muss ein Objekt sein",
+		"BASE_TYPE_ARRAY": "Dieses Feld muss ein Array sein",
+		"UNKOWN_FIELD": "Unbekanntes Feld: {{key}}",
+		"BASE_TYPE_CONSTANT": "Dieses Feld muss {{value}} sein",
+		"EMAIL_TYPE_INVALID_EMAIL": "Keine gültige E-Mail-Adresse",
+		"DATE_TYPE_PARSE": "Konnte {{date}} nicht lesen. Muss ISO8601 entsprechen",
+		"BASE_TYPE_BAD_LENGTH": "Muss zwischen {{length}} lang sein"
	}
}
@@ -3,12 +3,12 @@
		"BASE_TYPE_REQUIRED": "Bu alan gereklidir",
		"BASE_TYPE_STRING": "Bu alan bir metin (string) olmalı",
		"BASE_TYPE_NUMBER": "Bu alan bir sayı olmalı",
-		"BASE_TYPE_BIGINT": "Bu alan uzun tamsayı değeri olmalıdır",
-		"BASE_TYPE_BOOLEAN": "Bu alan doğru/yanlış olmalıdır",
-		"BASE_TYPE_CHOICES": "Bu alan ({{types}}) tiplerinden biri olmalı",
+		"BASE_TYPE_BIGINT": "Bu alan büyük integer (bkz. bigint) değeri olmalıdır",
+		"BASE_TYPE_BOOLEAN": "Bu alan mantıksal değer (boolean) olmalıdır",
+		"BASE_TYPE_CHOICES": "Bu alan ({{types}}) lardan biri olmak zorunda",
		"BASE_TYPE_CLASS": "Bu alan {{type}} türünden olmalı",
-		"BASE_TYPE_OBJECT": "Bu alan bir nesne olmalı",
-		"BASE_TYPE_ARRAY": "Bu alan bir dizi olmalı",
+		"BASE_TYPE_OBJECT": "Bu alan bir obje olmalı",
+		"BASE_TYPE_ARRAY": "Bu alan bir dizi (array) olmalı",
		"UNKOWN_FIELD": "Bilinmeyen anahtar: {{key}}",
		"BASE_TYPE_CONSTANT": "Bu alan {{value}} olmalı",
		"EMAIL_TYPE_INVALID_EMAIL": "Geçerli bir e-posta adresi değil",
api/package-lock.json (generated, 9926 changed lines)
File diff suppressed because it is too large.
@@ -86,6 +86,7 @@
		"multer": "^1.4.2",
		"node-fetch": "^2.6.1",
		"patch-package": "^6.4.7",
+		"proxy-agent": "^5.0.0",
		"supertest": "^6.1.6",
		"typeorm": "^0.2.37"
	},
api/src/global.d.ts (vendored, 14 changed lines)
@@ -1,8 +1,8 @@
declare global {
	namespace Express {
		interface Request {
			user_id: any;
			token: any;
		}
	}
}
|
@ -9,7 +9,7 @@ router.get("/", route({}), (req: Request, res: Response) => {
|
||||
// test that the database is alive & responding
|
||||
getConnection();
|
||||
return res.sendStatus(200);
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
res.sendStatus(503);
|
||||
}
|
||||
});
|
||||
|
@@ -9,7 +9,7 @@ router.get("/", route({}), (req: Request, res: Response) => {
		// test that the database is alive & responding
		getConnection();
		return res.sendStatus(200);
-	} catch(e) {
+	} catch (e) {
		res.sendStatus(503);
	}
});
@@ -5,7 +5,7 @@ const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	// TODO:
	//const { exclude_consumed } = req.query;
	res.status(200).send([]);
});
@@ -6,25 +6,23 @@ const router = Router();
router.post("/", route({ permission: "MANAGE_MESSAGES" }), (req: Request, res: Response) => {
	// TODO:
	res.json({
		id: "",
		type: 0,
		content: "",
		channel_id: "",
		author: { id: "", username: "", avatar: "", discriminator: "", public_flags: 64 },
		attachments: [],
		embeds: [],
		mentions: [],
		mention_roles: [],
		pinned: false,
		mention_everyone: false,
		tts: false,
		timestamp: "",
		edited_timestamp: null,
		flags: 1,
		components: []
	}).status(200);
});

export default router;
@@ -1,6 +1,16 @@
import { Request, Response, Router } from "express";
-import { Channel, ChannelRecipientAddEvent, ChannelType, DiscordApiErrors, DmChannelDTO, emitEvent, PublicUserProjection, Recipient, User } from "@fosscord/util";
-import { route } from "@fosscord/api"
+import {
+	Channel,
+	ChannelRecipientAddEvent,
+	ChannelType,
+	DiscordApiErrors,
+	DmChannelDTO,
+	emitEvent,
+	PublicUserProjection,
+	Recipient,
+	User
+} from "@fosscord/util";
+import { route } from "@fosscord/api";

const router: Router = Router();

@@ -9,20 +19,17 @@ router.put("/:user_id", route({}), async (req: Request, res: Response) => {
	const channel = await Channel.findOneOrFail({ where: { id: channel_id }, relations: ["recipients"] });

	if (channel.type !== ChannelType.GROUP_DM) {
-		const recipients = [
-			...channel.recipients!.map(r => r.user_id),
-			user_id
-		].unique()
+		const recipients = [...channel.recipients!.map((r) => r.user_id), user_id].unique();

-		const new_channel = await Channel.createDMChannel(recipients, req.user_id)
+		const new_channel = await Channel.createDMChannel(recipients, req.user_id);
		return res.status(201).json(new_channel);
	} else {
-		if (channel.recipients!.map(r => r.user_id).includes(user_id)) {
-			throw DiscordApiErrors.INVALID_RECIPIENT //TODO is this the right error?
+		if (channel.recipients!.map((r) => r.user_id).includes(user_id)) {
+			throw DiscordApiErrors.INVALID_RECIPIENT; //TODO is this the right error?
		}

		channel.recipients!.push(new Recipient({ channel_id: channel_id, user_id: user_id }));
-		await channel.save()
+		await channel.save();

		await emitEvent({
			event: "CHANNEL_CREATE",
@@ -31,10 +38,12 @@ router.put("/:user_id", route({}), async (req: Request, res: Response) => {
		});

		await emitEvent({
-			event: "CHANNEL_RECIPIENT_ADD", data: {
+			event: "CHANNEL_RECIPIENT_ADD",
+			data: {
				channel_id: channel_id,
				user: await User.findOneOrFail({ where: { id: user_id }, select: PublicUserProjection })
-			}, channel_id: channel_id
+			},
+			channel_id: channel_id
		} as ChannelRecipientAddEvent);
		return res.sendStatus(204);
	}
@@ -44,13 +53,13 @@ router.delete("/:user_id", route({}), async (req: Request, res: Response) => {
	const { channel_id, user_id } = req.params;
	const channel = await Channel.findOneOrFail({ where: { id: channel_id }, relations: ["recipients"] });
	if (!(channel.type === ChannelType.GROUP_DM && (channel.owner_id === req.user_id || user_id === req.user_id)))
-		throw DiscordApiErrors.MISSING_PERMISSIONS
+		throw DiscordApiErrors.MISSING_PERMISSIONS;

-	if (!channel.recipients!.map(r => r.user_id).includes(user_id)) {
-		throw DiscordApiErrors.INVALID_RECIPIENT //TODO is this the right error?
+	if (!channel.recipients!.map((r) => r.user_id).includes(user_id)) {
+		throw DiscordApiErrors.INVALID_RECIPIENT; //TODO is this the right error?
	}

-	await Channel.removeRecipientFromChannel(channel, user_id)
+	await Channel.removeRecipientFromChannel(channel, user_id);

	return res.sendStatus(204);
});
@@ -3,7 +3,6 @@ import { Guild, Config } from "@fosscord/util";
import { Router, Request, Response } from "express";
import { route } from "@fosscord/api";

const router = Router();

router.get("/", route({}), async (req: Request, res: Response) => {
@@ -12,7 +11,9 @@ router.get("/", route({}), async (req: Request, res: Response) => {
	// ! this only works using SQL querys
	// TODO: implement this with default typeorm query
	// const guilds = await Guild.find({ where: { features: "DISCOVERABLE" } }); //, take: Math.abs(Number(limit)) });
-	const guilds = showAllGuilds ? await Guild.find({take: Math.abs(Number(limit || 20))}) : await Guild.find({ where: `"features" LIKE '%COMMUNITY%'`, take: Math.abs(Number(limit || 20)) });
+	const guilds = showAllGuilds
+		? await Guild.find({ take: Math.abs(Number(limit || 20)) })
+		: await Guild.find({ where: `"features" LIKE '%COMMUNITY%'`, take: Math.abs(Number(limit || 20)) });
	res.send({ guilds: guilds });
});
@@ -5,7 +5,7 @@ const router = Router();

router.get("/categories", route({}), (req: Request, res: Response) => {
	// TODO:
	//const { locale, primary_only } = req.query;
	res.json([]).status(200);
});
@@ -12,7 +12,7 @@ export interface GatewayBotResponse {
		remaining: number;
		reset_after: number;
		max_concurrency: number;
-	}
+	};
}

const options: RouteOptions = {
@@ -1,5 +1,6 @@
import { Router, Response, Request } from "express";
import fetch from "node-fetch";
+import ProxyAgent from 'proxy-agent';
import { route } from "@fosscord/api";
import { getGifApiKey, parseGifResult } from "./trending";

@@ -10,8 +11,11 @@ router.get("/", route({}), async (req: Request, res: Response) => {
	const { q, media_format, locale } = req.query;

	const apiKey = getGifApiKey();

+	const agent = new ProxyAgent();
+
	const response = await fetch(`https://g.tenor.com/v1/search?q=${q}&media_format=${media_format}&locale=${locale}&key=${apiKey}`, {
+		agent,
		method: "get",
		headers: { "Content-Type": "application/json" }
	});
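This change (and the matching ones in the other Tenor routes below) threads a `proxy-agent` instance into `node-fetch`. A minimal sketch of the pattern, assuming `new ProxyAgent()` with no arguments resolves the proxy from the standard environment variables (HTTP_PROXY/HTTPS_PROXY) and falls back to a direct connection when none are set:

```
import fetch from "node-fetch";
import ProxyAgent from "proxy-agent";

// One shared agent; the proxy (if any) is taken from the environment.
const agent = new ProxyAgent();

async function tenorSearch(q: string, apiKey: string) {
	const res = await fetch(
		`https://g.tenor.com/v1/search?q=${encodeURIComponent(q)}&key=${apiKey}`,
		{ agent, method: "get", headers: { "Content-Type": "application/json" } }
	);
	return res.json();
}
```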
@@ -1,5 +1,6 @@
import { Router, Response, Request } from "express";
import fetch from "node-fetch";
+import ProxyAgent from 'proxy-agent';
import { route } from "@fosscord/api";
import { getGifApiKey, parseGifResult } from "./trending";

@@ -10,8 +11,11 @@ router.get("/", route({}), async (req: Request, res: Response) => {
	const { media_format, locale } = req.query;

	const apiKey = getGifApiKey();

+	const agent = new ProxyAgent();
+
	const response = await fetch(`https://g.tenor.com/v1/trending?media_format=${media_format}&locale=${locale}&key=${apiKey}`, {
+		agent,
		method: "get",
		headers: { "Content-Type": "application/json" }
	});
@@ -1,5 +1,6 @@
import { Router, Response, Request } from "express";
import fetch from "node-fetch";
+import ProxyAgent from 'proxy-agent';
import { route } from "@fosscord/api";
import { Config } from "@fosscord/util";
import { HTTPError } from "lambert-server";
@@ -33,13 +34,17 @@ router.get("/", route({}), async (req: Request, res: Response) => {
	const { media_format, locale } = req.query;

	const apiKey = getGifApiKey();

+	const agent = new ProxyAgent();
+
	const [responseSource, trendGifSource] = await Promise.all([
		fetch(`https://g.tenor.com/v1/categories?locale=${locale}&key=${apiKey}`, {
+			agent,
			method: "get",
			headers: { "Content-Type": "application/json" }
		}),
		fetch(`https://g.tenor.com/v1/trending?locale=${locale}&key=${apiKey}`, {
+			agent,
			method: "get",
			headers: { "Content-Type": "application/json" }
		})
@@ -1,82 +1,82 @@
import { Router, Request, Response } from "express";
import { Guild, Member, Snowflake } from "@fosscord/util";
import { LessThan, IsNull } from "typeorm";
import { route } from "@fosscord/api";
const router = Router();

//Returns all inactive members, respecting role hierarchy
export const inactiveMembers = async (guild_id: string, user_id: string, days: number, roles: string[] = []) => {
	var date = new Date();
	date.setDate(date.getDate() - days);
	//Snowflake should have `generateFromTime` method? Or similar?
	var minId = BigInt(date.valueOf() - Snowflake.EPOCH) << BigInt(22);

	var members = await Member.find({
		where: [
			{
				guild_id,
				last_message_id: LessThan(minId.toString())
			},
			{
				last_message_id: IsNull()
			}
		],
		relations: ["roles"]
	});
	console.log(members);
	if (!members.length) return [];

	//I'm sure I can do this in the above db query ( and it would probably be better to do so ), but oh well.
	if (roles.length && members.length) members = members.filter((user) => user.roles?.some((role) => roles.includes(role.id)));

	const me = await Member.findOneOrFail({ id: user_id, guild_id }, { relations: ["roles"] });
	const myHighestRole = Math.max(...(me.roles?.map((x) => x.position) || []));

	const guild = await Guild.findOneOrFail({ where: { id: guild_id } });

	members = members.filter(
		(member) =>
			member.id !== guild.owner_id && //can't kick owner
			member.roles?.some(
				(role) =>
					role.position < myHighestRole || //roles higher than me can't be kicked
					me.id === guild.owner_id //owner can kick anyone
			)
	);

	return members;
};

router.get("/", route({ permission: "KICK_MEMBERS" }), async (req: Request, res: Response) => {
	const days = parseInt(req.query.days as string);

	var roles = req.query.include_roles;
	if (typeof roles === "string") roles = [roles]; //express will return array otherwise

	const members = await inactiveMembers(req.params.guild_id, req.user_id, days, roles as string[]);

	res.send({ pruned: members.length });
});

export interface PruneSchema {
	/**
	 * @min 0
	 */
	days: number;
}

router.post("/", route({ permission: "KICK_MEMBERS" }), async (req: Request, res: Response) => {
	const days = parseInt(req.body.days);

	var roles = req.query.include_roles;
	if (typeof roles === "string") roles = [roles];

	const { guild_id } = req.params;
	const members = await inactiveMembers(guild_id, req.user_id, days, roles as string[]);

	await Promise.all(members.map((x) => Member.removeFromGuild(x.id, guild_id)));

	res.send({ purged: members.length });
});

export default router;
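The cutoff used above turns a date into the smallest snowflake ID that could have been generated after it, by shifting the millisecond offset from the snowflake epoch into the upper bits. A small sketch of that conversion (the epoch value is an assumption here; Discord-style snowflakes use 2015-01-01T00:00:00Z and keep the timestamp above the low 22 worker/process/sequence bits):

```
const EPOCH = 1420070400000n; // assumed snowflake epoch (2015-01-01T00:00:00Z)

// Any snowflake created before `date` is numerically smaller than this value,
// so `last_message_id < minSnowflakeSince(date)` means "has not posted since `date`".
function minSnowflakeSince(date: Date): bigint {
	return (BigInt(date.valueOf()) - EPOCH) << 22n;
}

minSnowflakeSince(new Date(Date.now() - 30 * 24 * 60 * 60 * 1000)); // cutoff for "inactive for 30 days"
```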
@@ -2,8 +2,8 @@ import { Router, Request, Response } from "express";
import { route } from "@fosscord/api";
const router = Router();

-router.get("/",route({}), async (req: Request, res: Response) => {
-	//TODO
+router.get("/", route({}), async (req: Request, res: Response) => {
+	//TODO
	res.json([]);
});
@@ -6,19 +6,20 @@ const router: Router = Router();
router.get("/", route({}), async (req: Request, res: Response) => {
	//TODO
	res.json([
		{
			id: "",
			name: "",
			interval: 1,
			interval_count: 1,
			tax_inclusive: true,
			sku_id: "",
			fallback_price: 499,
			fallback_currency: "eur",
			currency: "eur",
			price: 4199,
			price_tier: null
		}
	]).status(200);
});

export default router;
@@ -4,17 +4,134 @@ import { route } from "@fosscord/api";
const router: Router = Router();

const skus = new Map([
	["521842865731534868", [{"id": "511651856145973248", "name": "Premium Monthly (Legacy)", "interval": 1, "interval_count": 1, "tax_inclusive": true, "sku_id": "521842865731534868", "currency": "usd", "price": 0, "price_tier": null}, {"id": "511651860671627264", "name": "Premium Yearly (Legacy)", "interval": 2, "interval_count": 1, "tax_inclusive": true, "sku_id": "521842865731534868", "currency": "usd", "price": 0, "price_tier": null}]],
	["521846918637420545", [{"id": "511651871736201216", "name": "Premium Classic Monthly", "interval": 1, "interval_count": 1, "tax_inclusive": true, "sku_id": "521846918637420545", "currency": "usd", "price": 0, "price_tier": null}, {"id": "511651876987469824", "name": "Premium Classic Yearly", "interval": 2, "interval_count": 1, "tax_inclusive": true, "sku_id": "521846918637420545", "currency": "usd", "price": 0, "price_tier": null}]],
	["521847234246082599", [{"id": "642251038925127690", "name": "Premium Quarterly", "interval": 1, "interval_count": 3, "tax_inclusive": true, "sku_id": "521847234246082599", "currency": "usd", "price": 0, "price_tier": null}, {"id": "511651880837840896", "name": "Premium Monthly", "interval": 1, "interval_count": 1, "tax_inclusive": true, "sku_id": "521847234246082599", "currency": "usd", "price": 0, "price_tier": null}, {"id": "511651885459963904", "name": "Premium Yearly", "interval": 2, "interval_count": 1, "tax_inclusive": true, "sku_id": "521847234246082599", "currency": "usd", "price": 0, "price_tier": null}]],
	["590663762298667008", [{"id": "590665532894740483", "name": "Server Boost Monthly", "interval": 1, "interval_count": 1, "tax_inclusive": true, "sku_id": "590663762298667008", "discount_price": 0, "currency": "usd", "price": 0, "price_tier": null}, {"id": "590665538238152709", "name": "Server Boost Yearly", "interval": 2, "interval_count": 1, "tax_inclusive": true, "sku_id": "590663762298667008", "discount_price": 0, "currency": "usd", "price": 0, "price_tier": null}]]
]);

router.get("/", route({}), async (req: Request, res: Response) => {
	// TODO: add the ability to add custom
	const { sku_id } = req.params;

-	if(!skus.has(sku_id)) {
+	if (!skus.has(sku_id)) {
		console.log(`Request for invalid SKU ${sku_id}! Please report this!`);
		res.sendStatus(404);
	} else {
@@ -5,7 +5,7 @@ const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	// TODO:
-	res.json([]).status(200)
+	res.json([]).status(200);
});

export default router;
@@ -5,7 +5,7 @@ const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	// TODO:
-	res.json([]).status(200)
+	res.json([]).status(200);
});

export default router;
@@ -5,8 +5,11 @@ import { route } from "@fosscord/api";
const router: Router = Router();

router.get("/", route({}), async (req: Request, res: Response) => {
-	const recipients = await Recipient.find({ where: { user_id: req.user_id, closed: false }, relations: ["channel", "channel.recipients"] });
-	res.json(await Promise.all(recipients.map(r => DmChannelDTO.from(r.channel, [req.user_id]))));
+	const recipients = await Recipient.find({
+		where: { user_id: req.user_id, closed: false },
+		relations: ["channel", "channel.recipients"]
+	});
+	res.json(await Promise.all(recipients.map((r) => DmChannelDTO.from(r.channel, [req.user_id]))));
});

export interface DmChannelCreateSchema {
@@ -4,14 +4,15 @@ import { route } from "@fosscord/api";
const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	// TODO:
	res.json({
		categories: {
			social: true,
			communication: true,
			tips: false,
			updates_and_announcements: false,
			recommendations_and_events: false
		},
		initialized: false
	}).status(200);
});
@@ -33,7 +33,7 @@ const DEFAULT_FETCH_OPTIONS: any = {
	redirect: "follow",
	follow: 1,
	headers: {
-		"user-agent": "Mozilla/5.0 (compatible; Discordbot/2.0; +https://discordapp.com)"
+		"user-agent": "Mozilla/5.0 (compatible; Fosscord/1.0; +https://github.com/fosscord/fosscord)"
	},
	size: 1024 * 1024 * 1,
	compress: true,
@@ -81,18 +81,15 @@ export function getIpAdress(req: Request): string {
	return req.headers[Config.get().security.forwadedFor] || req.socket.remoteAddress;
}

export function distanceBetweenLocations(loc1: any, loc2: any): number {
	return distanceBetweenCoords(loc1.latitude, loc1.longitude, loc2.latitude, loc2.longitude);
}

//Haversine function
function distanceBetweenCoords(lat1: number, lon1: number, lat2: number, lon2: number) {
	const p = 0.017453292519943295; // Math.PI / 180
	const c = Math.cos;
-	const a = 0.5 - c((lat2 - lat1) * p) / 2 +
-		c(lat1 * p) * c(lat2 * p) *
-		(1 - c((lon2 - lon1) * p)) / 2;
+	const a = 0.5 - c((lat2 - lat1) * p) / 2 + (c(lat1 * p) * c(lat2 * p) * (1 - c((lon2 - lon1) * p))) / 2;

	return 12742 * Math.asin(Math.sqrt(a)); // 2 * R; R = 6371 km
}
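Written out, the expression `distanceBetweenCoords` evaluates is the haversine formula; `p = π/180` converts degrees to radians and `12742 = 2 * 6371 km` is the Earth's diameter:

```
a = \frac{1 - \cos(\Delta\varphi)}{2} + \cos\varphi_1 \cos\varphi_2 \cdot \frac{1 - \cos(\Delta\lambda)}{2}
  = \sin^2\!\left(\frac{\Delta\varphi}{2}\right) + \cos\varphi_1 \cos\varphi_2 \sin^2\!\left(\frac{\Delta\lambda}{2}\right)

d = 2R \arcsin\sqrt{a} = 12742 \cdot \arcsin\sqrt{a} \ \text{km}
```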
bundle/.prettierrc (new file, 4 lines)
@@ -0,0 +1,4 @@
{
	"useTabs": true,
	"tabWidth": 4
}
bundle/package-lock.json (generated, 4346 changed lines)
File diff suppressed because it is too large.
@@ -92,11 +92,13 @@
		"node-os-utils": "^1.3.5",
		"patch-package": "^6.4.7",
		"pg": "^8.7.1",
		"proxy-agent": "^5.0.0",
		"reflect-metadata": "^0.1.13",
		"sqlite3": "^5.0.2",
		"supertest": "^6.1.6",
		"typeorm": "^0.2.37",
		"typescript": "^4.1.2",
		"typescript-json-schema": "^0.50.1",
		"ws": "^7.4.2"
	}
}
@@ -18,15 +18,24 @@ if (argv.includes("clean")) {
	});
}

-fse.copySync(path.join(__dirname, "..", "..", "api", "assets"), path.join(__dirname, "..", "dist", "api", "assets"));
+fse.copySync(
+	path.join(__dirname, "..", "..", "api", "assets"),
+	path.join(__dirname, "..", "dist", "api", "assets")
+);
+fse.copySync(
+	path.join(__dirname, "..", "..", "api", "client_test"),
+	path.join(__dirname, "..", "dist", "api", "client_test")
+);
-fse.copySync(path.join(__dirname, "..", "..", "api", "locales"), path.join(__dirname, "..", "dist", "api", "locales"));
+fse.copySync(
+	path.join(__dirname, "..", "..", "api", "locales"),
+	path.join(__dirname, "..", "dist", "api", "locales")
+);
dirs.forEach((a) => {
	fse.copySync("../" + a + "/src", "dist/" + a + "/src");
-	if (verbose) console.log(`Copied ${"../" + a + "/dist"} -> ${"dist/" + a + "/src"}!`);
+	if (verbose)
+		console.log(
+			`Copied ${"../" + a + "/dist"} -> ${"dist/" + a + "/src"}!`
+		);
});

console.log("Copying src files done");
@@ -35,7 +44,14 @@ console.log("Compiling src files ...");
console.log(
	execSync(
		'node "' +
-			path.join(__dirname, "..", "node_modules", "typescript", "lib", "tsc.js") +
+			path.join(
+				__dirname,
+				"..",
+				"node_modules",
+				"typescript",
+				"lib",
+				"tsc.js"
+			) +
			'" -p "' +
			path.join(__dirname, "..") +
			'"',
@@ -5,10 +5,19 @@ const parts = ["api", "util", "cdn", "gateway"];
const bundle = require("../package.json");

for (const part of parts) {
-	const { devDependencies, dependencies } = require(path.join("..", "..", part, "package.json"));
+	const { devDependencies, dependencies } = require(path.join(
+		"..",
+		"..",
+		part,
+		"package.json"
+	));
	bundle.devDependencies = { ...bundle.devDependencies, ...devDependencies };
	bundle.dependencies = { ...bundle.dependencies, ...dependencies };
	delete bundle.dependencies["@fosscord/util"];
}

-fs.writeFileSync(path.join(__dirname, "..", "package.json"), JSON.stringify(bundle, null, "\t"), { encoding: "utf8" });
+fs.writeFileSync(
+	path.join(__dirname, "..", "package.json"),
+	JSON.stringify(bundle, null, "\t"),
+	{ encoding: "utf8" }
+);
@@ -61,8 +61,12 @@ Current commit: ${

	// Fork workers.
	for (let i = 0; i < cores; i++) {
-		cluster.fork();
-		console.log(`[Process] worker ${i} started.`);
+		// Delay each worker start if using sqlite database to prevent locking it
+		let delay = process.env.DATABASE?.includes("://") ? 0 : i * 1000;
+		setTimeout(() => {
+			cluster.fork();
+			console.log(`[Process] worker ${i} started.`);
+		}, delay);
	}

	cluster.on("message", (sender: Worker, message: any) => {
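The delay above only kicks in when `DATABASE` does not look like a connection URI, i.e. when the bundle falls back to a file-backed SQLite database; worker `i` then waits `i` seconds so the earlier workers can open the database file before the next one touches it. A minimal sketch of the same idea in isolation:

```
import cluster from "cluster";

const cores = 4; // assumed worker count
const usesFileDatabase = !process.env.DATABASE?.includes("://");

for (let i = 0; i < cores; i++) {
	// 0 ms, 1000 ms, 2000 ms, ... but only for a local SQLite file
	const delay = usesFileDatabase ? i * 1000 : 0;
	setTimeout(() => cluster.fork(), delay);
}
```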
@@ -8,7 +8,9 @@
		"incremental": false /* Enable incremental compilation */,
		"target": "ES6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */,
		"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
-		"lib": ["ES2021"] /* Specify library files to be included in the compilation. */,
+		"lib": [
+			"ES2021"
+		] /* Specify library files to be included in the compilation. */,
		"allowJs": true /* Allow javascript files to be compiled. */,
		"checkJs": true /* Report errors in .js files. */,
		// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
@@ -46,7 +48,9 @@
		// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
		// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
		// "typeRoots": [], /* List of folders to include type definitions from. */
-		"types": ["node"] /* Type declaration files to be included in compilation. */,
+		"types": [
+			"node"
+		] /* Type declaration files to be included in compilation. */,
		// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
		"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
		// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
@@ -1,20 +1,26 @@
# Fosscord-CDN

CDN for Fosscord

## Run locally:

```
npm i
node dist/
```

## Endpoints:

### POST `/attachments/<filename>`

```
Content-Type: form-data

attachment: File (binary-data)
```

##### Returns:

```
{
	"success": boolean, // true
@@ -23,20 +29,28 @@ attachment: File (binary-data)
	"filename": string // "lakdoiauej.png"
}
```

### GET `/attachments/<id>/<filename>`

```
requests image from database with given <id> and <filename>
```

##### Returns:

```
Content-Type: image/<imageType(png,img,gif)>
Image
```

### DELETE `/attachments/<id>/<filename>`

```
deletes database entry
```

##### Returns:

```
Content-Type: application/json

@@ -49,7 +63,8 @@ Content-Type: application/json
<hr>

_(endpoints for crawler):_

### POST `/external`

```
requests crawling of `og:`metadata and the download of the `og:image` property
@@ -59,7 +74,9 @@ Content-Type: application/json
body:
{"url": URL} // "https://discord.com"
```

##### Returns:

```
Content-Type: application/json

@@ -72,17 +89,23 @@ Content-Type: application/json
	"ogType": string // "website"
}
```

### GET `/external/<id>/<filename>`

- requests cached crawled image

```
url-params:
	:id // aHR0cHM6Ly9kaXNjb3JkLmNvbQ==
	:filename // discord.png
```

```
/external/aHR0cHM6Ly9kaXNjb3JkLmNvbQ==/discord.png
```

##### Returns:

```
Content-Type: image/<imageType(png,img,gif)>
Image
```
cdn/package-lock.json (generated, 45456 changed lines)
File diff suppressed because it is too large.
cdn/package.json (130 changed lines)
@@ -1,67 +1,67 @@
{
	"name": "@fosscord/cdn",
	"version": "1.0.0",
	"description": "cdn for fosscord",
	"main": "dist/index.js",
	"types": "src/index.ts",
	"scripts": {
		"test": "npm run build && jest --coverage ./tests",
		"build": "npx tsc -p .",
		"start": "node dist/start.js"
	},
	"repository": {
		"type": "git",
		"url": "git+https://github.com/fosscord/fosscord-server.git"
	},
	"keywords": [],
	"author": "",
	"license": "ISC",
	"bugs": {
		"url": "https://github.com/fosscord/fosscord-server/issues"
	},
	"homepage": "https://github.com/fosscord/fosscord-server#readme",
	"devDependencies": {
		"@types/amqplib": "^0.8.1",
		"@types/body-parser": "^1.19.0",
		"@types/btoa": "^1.2.3",
		"@types/dotenv": "^8.2.0",
		"@types/express": "^4.17.12",
		"@types/fs-extra": "^9.0.12",
		"@types/jsonwebtoken": "^8.5.0",
		"@types/multer": "^1.4.7",
		"@types/node": "^14.17.0",
		"@types/node-fetch": "^2.5.7",
		"@zerollup/ts-transform-paths": "^1.7.18",
		"ts-patch": "^1.4.4"
	},
	"dependencies": {
		"@aws-sdk/client-s3": "^3.36.1",
		"@aws-sdk/node-http-handler": "^3.36.0",
		"@fosscord/util": "file:../util",
		"body-parser": "^1.19.0",
		"btoa": "^1.2.1",
		"dotenv": "^10.0.0",
		"exif-be-gone": "^1.2.0",
		"express": "^4.17.1",
		"express-async-errors": "^3.1.1",
		"file-type": "^16.5.0",
		"form-data": "^4.0.0",
		"fs-extra": "^10.0.0",
		"image-size": "^1.0.0",
		"jest": "^27.0.6",
		"lambert-db": "^1.2.3",
		"lambert-server": "^1.2.12",
		"missing-native-js-functions": "^1.2.17",
		"multer": "^1.4.2",
		"nanocolors": "^0.2.12",
		"node-fetch": "^2.6.1",
		"supertest": "^6.1.6",
		"typescript": "^4.1.2"
	},
	"jest": {
		"setupFilesAfterEnv": [
			"<rootDir>/jest/setup.js"
		],
		"verbose": true
	}
|
||||
"setupFilesAfterEnv": [
|
||||
"<rootDir>/jest/setup.js"
|
||||
],
|
||||
"verbose": true
|
||||
}
|
||||
}
|
||||
|
@ -5,30 +5,30 @@ import { Storage } from "./Storage";
const readableToBuffer = (readable: Readable): Promise<Buffer> =>
new Promise((resolve, reject) => {
const chunks: Buffer[] = [];
readable.on('data', chunk => chunks.push(chunk));
readable.on('error', reject);
readable.on('end', () => resolve(Buffer.concat(chunks)));
readable.on("data", (chunk) => chunks.push(chunk));
readable.on("error", reject);
readable.on("end", () => resolve(Buffer.concat(chunks)));
});

export class S3Storage implements Storage {
public constructor(
private client: S3,
private bucket: string,
private basePath?: string,
private basePath?: string
) {}

/**
* Always return a string, to ensure consistency.
*/
get bucketBasePath() {
return this.basePath ?? '';
return this.basePath ?? "";
}

async set(path: string, data: Buffer): Promise<void> {
await this.client.putObject({
Bucket: this.bucket,
Key: `${this.bucketBasePath}${path}`,
Body: data
Body: data,
});
}

@ -36,15 +36,15 @@ export class S3Storage implements Storage {
try {
const s3Object = await this.client.getObject({
Bucket: this.bucket,
Key: `${this.bucketBasePath ?? ''}${path}`
Key: `${this.bucketBasePath ?? ""}${path}`,
});

if (!s3Object.Body) return null;

const body = s3Object.Body;

return await readableToBuffer(<Readable> body);
} catch(err) {
return await readableToBuffer(<Readable>body);
} catch (err) {
console.error(`[CDN] Unable to get S3 object at path ${path}.`);
console.error(err);
return null;
@ -54,7 +54,7 @@ export class S3Storage implements Storage {
async delete(path: string): Promise<void> {
await this.client.deleteObject({
Bucket: this.bucket,
Key: `${this.bucketBasePath}${path}`
Key: `${this.bucketBasePath}${path}`,
});
}
}
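For orientation, a minimal usage sketch of the class in this hunk (not part of the commit). It assumes the `Storage` interface also exposes a `get(path)` method matching the `getObject` call above; the region, bucket name, base path, and keys are placeholder values.

```
import { S3 } from "@aws-sdk/client-s3";
import { S3Storage } from "./S3Storage";

// Hypothetical wiring: client, bucket and base path are examples only.
const storage = new S3Storage(new S3({ region: "eu-central-1" }), "my-cdn-bucket", "attachments/");

async function roundTrip() {
	await storage.set("123/avatar.png", Buffer.from("example bytes"));
	const data = await storage.get("123/avatar.png"); // Buffer | null
	if (data) console.log(`fetched ${data.length} bytes from S3`);
	await storage.delete("123/avatar.png");
}

roundTrip().catch(console.error);
```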
@ -2,7 +2,7 @@ import { FileStorage } from "./FileStorage";
import path from "path";
import fse from "fs-extra";
import { bgCyan, black } from "nanocolors";
import { S3 } from '@aws-sdk/client-s3';
import { S3 } from "@aws-sdk/client-s3";
import { S3Storage } from "./S3Storage";
process.cwd();

@ -27,17 +27,20 @@ if (process.env.STORAGE_PROVIDER === "file" || !process.env.STORAGE_PROVIDER) {

storage = new FileStorage();
} else if (process.env.STORAGE_PROVIDER === "s3") {
const
region = process.env.STORAGE_REGION,
const region = process.env.STORAGE_REGION,
bucket = process.env.STORAGE_BUCKET;

if (!region) {
console.error(`[CDN] You must provide a region when using the S3 storage provider.`);
console.error(
`[CDN] You must provide a region when using the S3 storage provider.`
);
process.exit(1);
}

if (!bucket) {
console.error(`[CDN] You must provide a bucket when using the S3 storage provider.`);
console.error(
`[CDN] You must provide a bucket when using the S3 storage provider.`
);
process.exit(1);
}

@ -45,8 +48,10 @@ if (process.env.STORAGE_PROVIDER === "file" || !process.env.STORAGE_PROVIDER) {
let location = process.env.STORAGE_LOCATION;

if (!location) {
console.warn(`[CDN] STORAGE_LOCATION unconfigured for S3 provider, defaulting to the bucket root...`);
location = undefined;
console.warn(
`[CDN] STORAGE_LOCATION unconfigured for S3 provider, defaulting to the bucket root...`
);
location = undefined;
}

const client = new S3({ region });
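To make the configuration path easier to follow, here is a condensed sketch of the provider selection implemented in this file (illustrative only, not the actual module). The environment variable names (`STORAGE_PROVIDER`, `STORAGE_REGION`, `STORAGE_BUCKET`, `STORAGE_LOCATION`) are the ones read above; the return shape is an assumption for the sketch.

```
import { S3 } from "@aws-sdk/client-s3";

// Mirrors the branching above without the surrounding module state.
function resolveStorageConfig() {
	const provider = process.env.STORAGE_PROVIDER || "file";
	if (provider === "file") return { kind: "file" as const };

	if (provider === "s3") {
		const region = process.env.STORAGE_REGION;
		const bucket = process.env.STORAGE_BUCKET;
		if (!region || !bucket) {
			throw new Error("STORAGE_REGION and STORAGE_BUCKET are required for the S3 provider");
		}
		// STORAGE_LOCATION is optional; when unset the bucket root is used.
		const basePath = process.env.STORAGE_LOCATION || undefined;
		return { kind: "s3" as const, client: new S3({ region }), bucket, basePath };
	}

	throw new Error(`Unknown STORAGE_PROVIDER: ${provider}`);
}
```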
@ -11,7 +11,12 @@ if (!process.env.STORAGE_PROVIDER) process.env.STORAGE_PROVIDER = "file";
|
||||
if (process.env.STORAGE_PROVIDER === "file") {
|
||||
if (process.env.STORAGE_LOCATION) {
|
||||
if (!process.env.STORAGE_LOCATION.startsWith("/")) {
|
||||
process.env.STORAGE_LOCATION = path.join(__dirname, "..", process.env.STORAGE_LOCATION, "/");
|
||||
process.env.STORAGE_LOCATION = path.join(
|
||||
__dirname,
|
||||
"..",
|
||||
process.env.STORAGE_LOCATION,
|
||||
"/"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
process.env.STORAGE_LOCATION = path.join(__dirname, "..", "files", "/");
|
||||
@ -67,7 +72,9 @@ describe("/attachments", () => {
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.headers["content-type"]).toEqual(expect.stringContaining("json"));
|
||||
expect(response.headers["content-type"]).toEqual(
|
||||
expect.stringContaining("json")
|
||||
);
|
||||
expect(response.body.url).toBeDefined();
|
||||
});
|
||||
});
|
||||
@ -79,9 +86,11 @@ describe("/attachments", () => {
|
||||
.post("/attachments/123456789")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
request.get(response.body.url.replace("http://localhost:3003", "")).then((x) => {
|
||||
expect(x.statusCode).toBe(200);
|
||||
});
|
||||
request
|
||||
.get(response.body.url.replace("http://localhost:3003", ""))
|
||||
.then((x) => {
|
||||
expect(x.statusCode).toBe(200);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -92,9 +101,13 @@ describe("/attachments", () => {
|
||||
.post("/attachments/123456789")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
request.delete(response.body.url.replace("http://localhost:3003", "")).then((x) => {
|
||||
expect(x.body.success).toBeDefined();
|
||||
});
|
||||
request
|
||||
.delete(
|
||||
response.body.url.replace("http://localhost:3003", "")
|
||||
)
|
||||
.then((x) => {
|
||||
expect(x.body.success).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -123,7 +136,9 @@ describe("/avatars", () => {
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.headers["content-type"]).toEqual(expect.stringContaining("json"));
|
||||
expect(response.headers["content-type"]).toEqual(
|
||||
expect.stringContaining("json")
|
||||
);
|
||||
expect(response.body.url).toBeDefined();
|
||||
});
|
||||
});
|
||||
@ -135,9 +150,11 @@ describe("/avatars", () => {
|
||||
.post("/avatars/123456789")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
request.get(response.body.url.replace("http://localhost:3003", "")).then((x) => {
|
||||
expect(x.statusCode).toBe(200);
|
||||
});
|
||||
request
|
||||
.get(response.body.url.replace("http://localhost:3003", ""))
|
||||
.then((x) => {
|
||||
expect(x.statusCode).toBe(200);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -148,9 +165,13 @@ describe("/avatars", () => {
|
||||
.post("/avatars/123456789")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.attach("file", __dirname + "/antman.jpg");
|
||||
request.delete(response.body.url.replace("http://localhost:3003", "")).then((x) => {
|
||||
expect(x.body.success).toBeDefined();
|
||||
});
|
||||
request
|
||||
.delete(
|
||||
response.body.url.replace("http://localhost:3003", "")
|
||||
)
|
||||
.then((x) => {
|
||||
expect(x.body.success).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -177,9 +198,13 @@ describe("/external", () => {
|
||||
const response = await request
|
||||
.post("/external")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.send({ url: "https://i.ytimg.com/vi_webp/TiXzhQr5AUc/mqdefault.webp" });
|
||||
.send({
|
||||
url: "https://i.ytimg.com/vi_webp/TiXzhQr5AUc/mqdefault.webp",
|
||||
});
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.headers["content-type"]).toEqual(expect.stringContaining("json"));
|
||||
expect(response.headers["content-type"]).toEqual(
|
||||
expect.stringContaining("json")
|
||||
);
|
||||
expect(response.body.id).toBeDefined();
|
||||
});
|
||||
});
|
||||
@ -201,7 +226,9 @@ describe("/external", () => {
|
||||
let response = await request
|
||||
.post("/external")
|
||||
.set({ signature: Config.get().security.requestSignature })
|
||||
.send({ url: "https://i.ytimg.com/vi_webp/TiXzhQr5AUc/mqdefault.webp" });
|
||||
.send({
|
||||
url: "https://i.ytimg.com/vi_webp/TiXzhQr5AUc/mqdefault.webp",
|
||||
});
|
||||
request.get(`external/${response.body.id}`).then((x) => {
|
||||
expect(x.statusCode).toBe(200);
|
||||
});
|
||||
|
@ -7,7 +7,10 @@
|
||||
"incremental": true /* Enable incremental compilation */,
|
||||
"target": "ES6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */,
|
||||
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
|
||||
"lib": ["ES2015", "dom"] /* Specify library files to be included in the compilation. */,
|
||||
"lib": [
|
||||
"ES2015",
|
||||
"dom"
|
||||
] /* Specify library files to be included in the compilation. */,
|
||||
"allowJs": true /* Allow javascript files to be compiled. */,
|
||||
"checkJs": true /* Report errors in .js files. */,
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
@ -48,7 +51,9 @@
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
"types": ["node"] /* Type declaration files to be included in compilation. */,
|
||||
"types": [
|
||||
"node"
|
||||
] /* Type declaration files to be included in compilation. */,
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
|
@ -1,3 +1,3 @@
files:
- source: /api/locales/en/*.json
translation: /api/locales/%two_letters_code%/%original_file_name%
- source: /api/locales/en/*.json
translation: /api/locales/%two_letters_code%/%original_file_name%
@ -1,24 +1,7 @@
version: "3"
services:
db:
hostname: fosscord_db
image: mongo:latest
volumes:
- ./db:/data/db
restart: unless-stopped
api:
hostname: fosscord_api
image: fosscord/api
depends_on:
- db
ports:
- 3001:3001
env_file: ./.docker/env
gateway:
hostname: fosscord_gateway
image: fosscord/gateway
depends_on:
- db
ports:
- 3002:3002
env_file: ./.docker/env
server:
image: fosscord/server
build: .
ports:
- 3001:3001
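For reference (not part of the commit): with this simplified compose file, something like `docker-compose up --build` would build the local image and start the single `server` service on port 3001, replacing the separate `db`, `api`, and `gateway` services removed above.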
@ -26,4 +26,4 @@
}
],
"settings": {}
}
}
2 gateway/.github/FUNDING.yml (vendored)
@ -1 +1 @@
open_collective: fosscord
open_collective: fosscord
4194 gateway/package-lock.json (generated)
File diff suppressed because it is too large
@ -33,6 +33,7 @@
"lambert-server": "^1.2.11",
"missing-native-js-functions": "^1.2.18",
"node-fetch": "^2.6.1",
"proxy-agent": "^5.0.0",
"typeorm": "^0.2.37",
"ws": "^7.4.2"
},
@ -153,9 +153,11 @@ async function consume(this: WebSocket, opts: EventOpts) {
!permission
.overwriteChannel(data.permission_overwrites)
.has("VIEW_CHANNEL")
)
) {
return;
// No break needed here, we need to call the listenEvent function below
}
this.events[id] = await listenEvent(id, consumer, listenOpts);
break;
case "RELATIONSHIP_ADD":
this.events[data.user.id] = await listenEvent(
data.user.id,
@ -7,5 +7,7 @@ export function genVoiceToken() {
}

function genRanHex(size: number) {
return [...Array(size)].map(() => Math.floor(Math.random() * 16).toString(16)).join("");
return [...Array(size)]
.map(() => Math.floor(Math.random() * 16).toString(16))
.join("");
}
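For clarity, a quick illustration of what the reformatted helper produces (not part of the commit); the sample output is made up.

```
// genRanHex(16) returns a 16-character lowercase hex string, e.g. "9f3b0c2a7d41e58b".
// genVoiceToken() above builds its token from output like this.
const token = genRanHex(16);
console.log(token, token.length); // <random hex>, 16
```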
@ -8,7 +8,11 @@
|
||||
"incremental": true /* Enable incremental compilation */,
|
||||
"target": "ES2020" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */,
|
||||
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
|
||||
"lib": ["ES2015", "ES2020.BigInt", "DOM"] /* Specify library files to be included in the compilation. */,
|
||||
"lib": [
|
||||
"ES2015",
|
||||
"ES2020.BigInt",
|
||||
"DOM"
|
||||
] /* Specify library files to be included in the compilation. */,
|
||||
"allowJs": true /* Allow javascript files to be compiled. */,
|
||||
"checkJs": true /* Report errors in .js files. */,
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
@ -48,7 +52,9 @@
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
"types": ["node"] /* Type declaration files to be included in compilation. */,
|
||||
"types": [
|
||||
"node"
|
||||
] /* Type declaration files to be included in compilation. */,
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
|
@ -1 +1 @@
{}
{}

6 rtc/package-lock.json (generated)
@ -1,6 +0,0 @@
{
"name": "rtc",
"lockfileVersion": 2,
"requires": true,
"packages": {}
}
@ -1 +0,0 @@
{}

678 util/package-lock.json (generated)
File diff suppressed because it is too large
@ -40,6 +40,7 @@
"dependencies": {
"amqplib": "^0.8.0",
"better-sqlite3": "^7.4.3",
"form-data": "^4.0.0",
"jsonwebtoken": "^8.5.1",
"lambert-server": "^1.2.12",
"missing-native-js-functions": "^1.2.18",
@ -48,6 +49,7 @@
"node-fetch": "^2.6.1",
"patch-package": "^6.4.7",
"pg": "^8.7.1",
"proxy-agent": "^5.0.0",
"reflect-metadata": "^0.1.13",
"typeorm": "^0.2.38",
"typescript": "^4.4.2",
@ -12,24 +12,30 @@ export class DmChannelDTO {
type: number;

static async from(channel: Channel, excluded_recipients: string[] = [], origin_channel_id?: string) {
const obj = new DmChannelDTO()
obj.icon = channel.icon || null
obj.id = channel.id
obj.last_message_id = channel.last_message_id || null
obj.name = channel.name || null
obj.origin_channel_id = origin_channel_id || null
obj.owner_id = channel.owner_id
obj.type = channel.type
obj.recipients = (await Promise.all(channel.recipients!.filter(r => !excluded_recipients.includes(r.user_id)).map(async r => {
return await User.findOneOrFail({ where: { id: r.user_id }, select: PublicUserProjection })
}))).map(u => new MinimalPublicUserDTO(u))
return obj
const obj = new DmChannelDTO();
obj.icon = channel.icon || null;
obj.id = channel.id;
obj.last_message_id = channel.last_message_id || null;
obj.name = channel.name || null;
obj.origin_channel_id = origin_channel_id || null;
obj.owner_id = channel.owner_id;
obj.type = channel.type;
obj.recipients = (
await Promise.all(
channel
.recipients!.filter((r) => !excluded_recipients.includes(r.user_id))
.map(async (r) => {
return await User.findOneOrFail({ where: { id: r.user_id }, select: PublicUserProjection });
})
)
).map((u) => new MinimalPublicUserDTO(u));
return obj;
}

excludedRecipients(excluded_recipients: string[]): DmChannelDTO {
return {
...this,
recipients: this.recipients.filter(r => !excluded_recipients.includes(r.id))
}
recipients: this.recipients.filter((r) => !excluded_recipients.includes(r.id)),
};
}
}
}
@ -8,10 +8,10 @@ export class MinimalPublicUserDTO {
|
||||
username: string;
|
||||
|
||||
constructor(user: User) {
|
||||
this.avatar = user.avatar
|
||||
this.discriminator = user.discriminator
|
||||
this.id = user.id
|
||||
this.public_flags = user.public_flags
|
||||
this.username = user.username
|
||||
this.avatar = user.avatar;
|
||||
this.discriminator = user.discriminator;
|
||||
this.id = user.id;
|
||||
this.public_flags = user.public_flags;
|
||||
this.username = user.username;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,2 +1,2 @@
|
||||
export * from "./DmChannelDTO";
|
||||
export * from "./UserDTO";
|
||||
export * from "./UserDTO";
|
||||
|
@ -71,7 +71,7 @@ export class Invite extends BaseClassWithoutId {
@Column({ nullable: true })
target_user_type?: number;

@Column({ nullable: true})
@Column({ nullable: true })
vanity_url?: boolean;

static async joinGuild(user_id: string, code: string) {
@ -9,6 +9,7 @@ import {
|
||||
CreateDateColumn,
|
||||
Entity,
|
||||
FindConditions,
|
||||
Index,
|
||||
JoinColumn,
|
||||
JoinTable,
|
||||
ManyToMany,
|
||||
@ -45,9 +46,11 @@ export enum MessageType {
|
||||
}
|
||||
|
||||
@Entity("messages")
|
||||
@Index(["channel_id", "id"], { unique: true })
|
||||
export class Message extends BaseClass {
|
||||
@Column({ nullable: true })
|
||||
@RelationId((message: Message) => message.channel)
|
||||
@Index()
|
||||
channel_id: string;
|
||||
|
||||
@JoinColumn({ name: "channel_id" })
|
||||
@ -68,10 +71,13 @@ export class Message extends BaseClass {
|
||||
|
||||
@Column({ nullable: true })
|
||||
@RelationId((message: Message) => message.author)
|
||||
@Index()
|
||||
author_id: string;
|
||||
|
||||
@JoinColumn({ name: "author_id", referencedColumnName: "id" })
|
||||
@ManyToOne(() => User)
|
||||
@ManyToOne(() => User, {
|
||||
onDelete: "CASCADE",
|
||||
})
|
||||
author?: User;
|
||||
|
||||
@Column({ nullable: true })
|
||||
|
@ -10,7 +10,7 @@ export class Migration extends BaseClassWithoutId {
|
||||
@PrimaryIdAutoGenerated()
|
||||
id: number;
|
||||
|
||||
@Column({ type: 'bigint' })
|
||||
@Column({ type: "bigint" })
|
||||
timestamp: number;
|
||||
|
||||
@Column()
|
||||
|
@ -9,7 +9,8 @@ export class ApiError extends Error {
|
||||
}
|
||||
|
||||
withDefaultParams(): ApiError {
|
||||
if (this.defaultParams) return new ApiError(applyParamsToString(this.message, this.defaultParams), this.code, this.httpStatus);
|
||||
if (this.defaultParams)
|
||||
return new ApiError(applyParamsToString(this.message, this.defaultParams), this.code, this.httpStatus);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -1,3 +1,3 @@
export function containsAll(arr: any[], target: any[]) {
return target.every(v => arr.includes(v));
}
return target.every((v) => arr.includes(v));
}
@ -1,5 +1,6 @@
|
||||
import "missing-native-js-functions";
|
||||
import fetch from "node-fetch";
|
||||
import ProxyAgent from 'proxy-agent';
|
||||
import readline from "readline";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
@ -52,7 +53,8 @@ async function download(url: string, dir: string) {
|
||||
try {
|
||||
// TODO: use file stream instead of buffer (to prevent crash because of high memory usage for big files)
|
||||
// TODO check file hash
|
||||
const response = await fetch(url);
|
||||
const agent = new ProxyAgent();
|
||||
const response = await fetch(url, { agent });
|
||||
const buffer = await response.buffer();
|
||||
const tempDir = await fs.mkdtemp("fosscord");
|
||||
fs.writeFile(path.join(tempDir, "Fosscord.zip"), buffer);
|
||||
@ -72,7 +74,8 @@ async function getCurrentVersion(dir: string) {
|
||||
|
||||
async function getLatestVersion(url: string) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
const agent = new ProxyAgent();
|
||||
const response = await fetch(url, { agent });
|
||||
const content = await response.json();
|
||||
return content.version;
|
||||
} catch (error) {
|
||||
|
@ -4,7 +4,7 @@ import path from "path";
|
||||
import fs from "fs";
|
||||
|
||||
// TODO: yaml instead of json
|
||||
const overridePath = path.join(process.cwd(), "config.json");
|
||||
// const overridePath = path.join(process.cwd(), "config.json");
|
||||
|
||||
var config: ConfigValue;
|
||||
var pairs: ConfigEntity[];
|
||||
@ -19,12 +19,12 @@ export const Config = {
|
||||
config = pairsToConfig(pairs);
|
||||
config = (config || {}).merge(DefaultConfigOptions);
|
||||
|
||||
try {
|
||||
const overrideConfig = JSON.parse(fs.readFileSync(overridePath, { encoding: "utf8" }));
|
||||
config = overrideConfig.merge(config);
|
||||
} catch (error) {
|
||||
fs.writeFileSync(overridePath, JSON.stringify(config, null, 4));
|
||||
}
|
||||
// try {
|
||||
// const overrideConfig = JSON.parse(fs.readFileSync(overridePath, { encoding: "utf8" }));
|
||||
// config = overrideConfig.merge(config);
|
||||
// } catch (error) {
|
||||
// fs.writeFileSync(overridePath, JSON.stringify(config, null, 4));
|
||||
// }
|
||||
|
||||
return this.set(config);
|
||||
},
|
||||
@ -51,7 +51,7 @@ function applyConfig(val: ConfigValue) {
|
||||
pair.value = obj;
|
||||
return pair.save();
|
||||
}
|
||||
fs.writeFileSync(overridePath, JSON.stringify(val, null, 4));
|
||||
// fs.writeFileSync(overridePath, JSON.stringify(val, null, 4));
|
||||
|
||||
return apply(val);
|
||||
}
|
||||
|
@ -8,9 +8,9 @@ export function FieldErrors(fields: Record<string, { code?: string; message: str
|
||||
_errors: [
|
||||
{
|
||||
message,
|
||||
code: code || "BASE_TYPE_INVALID"
|
||||
}
|
||||
]
|
||||
code: code || "BASE_TYPE_INVALID",
|
||||
},
|
||||
],
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ export class Rights extends BitField {
|
||||
INVITE_USERS: BitFlag(29), // can create user-specific invites in the guilds that they have INVITE_USERS
|
||||
SELF_DELETE_DISABLE: BitFlag(30), // can disable/delete own account
|
||||
DEBTABLE: BitFlag(31), // can use pay-to-use features
|
||||
CREDITABLE: BitFlag(32) // can receive money from monetisation related features
|
||||
CREDITABLE: BitFlag(32), // can receive money from monetisation related features
|
||||
};
|
||||
|
||||
any(permission: RightResolvable, checkOperator = true) {
|
||||
|
@ -1,2 +1,3 @@
# fosscord-rtc-js

A javascript fosscord webrtc server for voice and video communication
@ -23,7 +23,11 @@ export class Server {
|
||||
ssrc: 1,
|
||||
ip: "127.0.0.1",
|
||||
port: 3004,
|
||||
modes: ["xsalsa20_poly1305", "xsalsa20_poly1305_suffix", "xsalsa20_poly1305_lite"],
|
||||
modes: [
|
||||
"xsalsa20_poly1305",
|
||||
"xsalsa20_poly1305_suffix",
|
||||
"xsalsa20_poly1305_lite",
|
||||
],
|
||||
heartbeat_interval: 1,
|
||||
},
|
||||
})
|
||||
|
@ -7,7 +7,9 @@
|
||||
// "incremental": true, /* Enable incremental compilation */
|
||||
"target": "ESNext" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */,
|
||||
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
|
||||
"lib": ["ES2021"] /* Specify library files to be included in the compilation. */,
|
||||
"lib": [
|
||||
"ES2021"
|
||||
] /* Specify library files to be included in the compilation. */,
|
||||
"allowJs": true /* Allow javascript files to be compiled. */,
|
||||
"checkJs": true /* Report errors in .js files. */,
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
@ -45,7 +47,9 @@
|
||||
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
"types": ["node"] /* Type declaration files to be included in compilation. */,
|
||||
"types": [
|
||||
"node"
|
||||
] /* Type declaration files to be included in compilation. */,
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
|