Merge remote-tracking branch 'upstream/master'

liamwhite committed Jul 1, 2024
2 parents 3366234 + d799c9d commit 521ea49
Showing 13 changed files with 136 additions and 89 deletions.
24 changes: 12 additions & 12 deletions assets/js/shortcuts.ts
@@ -44,28 +44,28 @@ function isOK(event: KeyboardEvent): boolean {
}

const keyCodes: ShortcutKeyMap = {
-KeyJ() { click('.js-prev'); }, // J - go to previous image
-KeyI() { click('.js-up'); }, // I - go to index page
-KeyK() { click('.js-next'); }, // K - go to next image
-KeyR() { click('.js-rand'); }, // R - go to random image
-KeyS() { click('.js-source-link'); }, // S - go to image source
-KeyL() { click('.js-tag-sauce-toggle'); }, // L - edit tags
-KeyO() { openFullView(); }, // O - open original
-KeyV() { openFullViewNewTab(); }, // V - open original in a new tab
-KeyF() { // F - favourite image
+'j'() { click('.js-prev'); }, // J - go to previous image
+'i'() { click('.js-up'); }, // I - go to index page
+'k'() { click('.js-next'); }, // K - go to next image
+'r'() { click('.js-rand'); }, // R - go to random image
+'s'() { click('.js-source-link'); }, // S - go to image source
+'l'() { click('.js-tag-sauce-toggle'); }, // L - edit tags
+'o'() { openFullView(); }, // O - open original
+'v'() { openFullViewNewTab(); }, // V - open original in a new tab
+'f'() { // F - favourite image
click(getHover() ? `a.interaction--fave[data-image-id="${getHover()}"]`
: '.block__header a.interaction--fave');
},
-KeyU() { // U - upvote image
+'u'() { // U - upvote image
click(getHover() ? `a.interaction--upvote[data-image-id="${getHover()}"]`
: '.block__header a.interaction--upvote');
},
};

export function listenForKeys() {
document.addEventListener('keydown', (event: KeyboardEvent) => {
-if (isOK(event) && keyCodes[event.code]) {
-keyCodes[event.code]();
+if (isOK(event) && keyCodes[event.key]) {
+keyCodes[event.key]();
event.preventDefault();
}
});
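
The switch from event.code to event.key changes matching from physical key position to the character the active layout actually produces, so the bindings follow the user's layout (on AZERTY, the key at the physical KeyQ position yields "a"). A minimal sketch of the distinction, illustrative only and not part of the commit:

    // Same physical keypress, two different identifiers:
    document.addEventListener('keydown', (event: KeyboardEvent) => {
      console.log(event.code); // physical position, layout-independent: "KeyQ"
      console.log(event.key);  // produced character, layout-dependent: "q" on QWERTY, "a" on AZERTY
    });

Note that event.key is case-sensitive ("J" with Shift held), so the lowercase map keys match only unshifted presses.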
2 changes: 1 addition & 1 deletion docker-compose.yml
@@ -68,7 +68,7 @@ services:
driver: "none"

opensearch:
-image: opensearchproject/opensearch:2.14.0
+image: opensearchproject/opensearch:2.15.0
volumes:
- opensearch_data:/usr/share/opensearch/data
- ./docker/opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml
7 changes: 4 additions & 3 deletions index/comments.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=comments.jsonl --output=http://localhost:9200/ --output-index=comments --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=comments.jsonl --output=$(OPENSEARCH_URL) --output-index=comments --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata authors tags
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_comments.jsonb_object_agg(object) from temp_comments.comment_search_json group by comment_id) to stdout;' > comments.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_comments cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_comments.jsonb_object_agg(object) from temp_comments.comment_search_json group by comment_id) to stdout;' > comments.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_comments cascade;'
sed -i comments.jsonl -e 's/\\\\/\\/g'

metadata: comment_search_json
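
Two changes recur across the index makefiles below: the OpenSearch endpoint becomes an overridable OPENSEARCH_URL variable instead of a hardcoded URL, and the bash-only <<< herestrings become psql -c flags, which work under plain /bin/sh. A sketch of the equivalence, using the makefiles' default DATABASE of philomena:

    # -c passes the command portably; <<< is a bash/zsh herestring.
    psql philomena -v ON_ERROR_STOP=1 -c 'select 1;'
    psql philomena -v ON_ERROR_STOP=1 <<< 'select 1;'   # fails under plain sh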
11 changes: 4 additions & 7 deletions index/filters.mk
@@ -1,19 +1,16 @@
DATABASE ?= philomena
-ELASTICSEARCH_URL ?= http://localhost:9200/
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
-# uncomment if getting "redirection unexpected" error on dump_jsonl
-#SHELL=/bin/bash

-.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=filters.jsonl --output=$(ELASTICSEARCH_URL) --output-index=filters --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=filters.jsonl --output=$(OPENSEARCH_URL) --output-index=filters --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata creators
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_filters.jsonb_object_agg(object) from temp_filters.filter_search_json group by filter_id) to stdout;' > filters.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_filters cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_filters.jsonb_object_agg(object) from temp_filters.filter_search_json group by filter_id) to stdout;' > filters.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_filters cascade;'
sed -i filters.jsonl -e 's/\\\\/\\/g'

metadata: filter_search_json
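
With the herestrings gone, filters.mk can also drop its .ONESHELL: directive and the commented-out SHELL=/bin/bash workaround. Note the parenthesized form $(OPENSEARCH_URL): in GNU Make a bare $OPENSEARCH_URL would expand $O (empty) followed by the literal text PENSEARCH_URL. A minimal sketch of the variable handling (recipe lines tab-indented):

    OPENSEARCH_URL ?= http://localhost:9200/   # ?= assigns only if unset in the environment

    demo:
    	@echo $(OPENSEARCH_URL)   # -> http://localhost:9200/
    	@echo $OPENSEARCH_URL     # -> PENSEARCH_URL, because $O expands to nothing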
7 changes: 4 additions & 3 deletions index/galleries.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=galleries.jsonl --output=http://localhost:9200/ --output-index=galleries --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=galleries.jsonl --output=$(OPENSEARCH_URL) --output-index=galleries --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata subscribers images
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_galleries.jsonb_object_agg(object) from temp_galleries.gallery_search_json group by gallery_id) to stdout;' > galleries.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_galleries cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_galleries.jsonb_object_agg(object) from temp_galleries.gallery_search_json group by gallery_id) to stdout;' > galleries.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_galleries cascade;'
sed -i galleries.jsonl -e 's/\\\\/\\/g'

metadata: gallery_search_json
9 changes: 5 additions & 4 deletions index/images.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=images.jsonl --output=http://localhost:9200/ --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=images.jsonl --output=$(OPENSEARCH_URL) --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata true_uploaders uploaders deleters galleries tags sources hides upvotes downvotes faves tag_names
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_images cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_images cascade;'
sed -i images.jsonl -e 's/\\\\/\\/g'

metadata: image_search_json
@@ -84,7 +84,7 @@ tags: image_search_json
'body_type_tag_count', count(case when t.category = 'body-type' then t.category else null end),
'content_fanmade_tag_count', count(case when t.category = 'content-fanmade' then t.category else null end),
'content_official_tag_count', count(case when t.category = 'content-official' then t.category else null end),
-'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end),
+'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end)
) from image_taggings it inner join tags t on t.id = it.tag_id group by image_id;
SQL
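
The second hunk removes a trailing comma after the final key/count pair: PostgreSQL does not accept trailing commas in argument lists, so the previous jsonb_build_object call would fail. Illustrative:

    select jsonb_build_object('a', 1, 'b', 2);   -- ok: {"a": 1, "b": 2}
    select jsonb_build_object('a', 1, 'b', 2,);  -- ERROR: syntax error at or near ")"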

7 changes: 4 additions & 3 deletions index/posts.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=posts.jsonl --output=http://localhost:9200/ --output-index=posts --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=posts.jsonl --output=$(OPENSEARCH_URL) --output-index=posts --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata authors
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_posts.jsonb_object_agg(object) from temp_posts.post_search_json group by post_id) to stdout;' > posts.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_posts cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_posts.jsonb_object_agg(object) from temp_posts.post_search_json group by post_id) to stdout;' > posts.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_posts cascade;'
sed -i posts.jsonl -e 's/\\\\/\\/g'

metadata: post_search_json
7 changes: 4 additions & 3 deletions index/reports.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=reports.jsonl --output=http://localhost:9200/ --output-index=reports --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=reports.jsonl --output=$(OPENSEARCH_URL) --output-index=reports --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata image_ids comment_image_ids
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_reports.jsonb_object_agg(object) from temp_reports.report_search_json group by report_id) to stdout;' > reports.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_reports cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_reports.jsonb_object_agg(object) from temp_reports.report_search_json group by report_id) to stdout;' > reports.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_reports cascade;'
sed -i reports.jsonl -e 's/\\\\/\\/g'

metadata: report_search_json
7 changes: 4 additions & 3 deletions index/tags.mk
@@ -1,15 +1,16 @@
DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
ELASTICDUMP ?= elasticdump
.ONESHELL:

all: import_es

import_es: dump_jsonl
-$(ELASTICDUMP) --input=tags.jsonl --output=http://localhost:9200/ --output-index=tags --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+$(ELASTICDUMP) --input=tags.jsonl --output=$(OPENSEARCH_URL) --output-index=tags --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"

dump_jsonl: metadata aliases implied_tags implied_by_tags
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_tags.jsonb_object_agg(object) from temp_tags.tag_search_json group by tag_id) to stdout;' > tags.jsonl
-psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_tags cascade;'
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_tags.jsonb_object_agg(object) from temp_tags.tag_search_json group by tag_id) to stdout;' > tags.jsonl
+psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_tags cascade;'
sed -i tags.jsonl -e 's/\\\\/\\/g'

metadata: tag_search_json
58 changes: 21 additions & 37 deletions lib/philomena/channels.ex
@@ -6,49 +6,15 @@ defmodule Philomena.Channels do
import Ecto.Query, warn: false
alias Philomena.Repo

+alias Philomena.Channels.AutomaticUpdater
alias Philomena.Channels.Channel
-alias Philomena.Channels.PicartoChannel
-alias Philomena.Channels.PiczelChannel
alias Philomena.Notifications

@doc """
-Updates all the tracked channels for which an update
-scheme is known.
+Updates all the tracked channels for which an update scheme is known.
"""
def update_tracked_channels! do
-now = DateTime.utc_now() |> DateTime.truncate(:second)
-
-picarto_channels = PicartoChannel.live_channels(now)
-live_picarto_channels = Map.keys(picarto_channels)
-
-piczel_channels = PiczelChannel.live_channels(now)
-live_piczel_channels = Map.keys(piczel_channels)
-
-# Update all channels which are offline to reflect offline status
-offline_query =
-from c in Channel,
-where: c.type == "PicartoChannel" and c.short_name not in ^live_picarto_channels,
-or_where: c.type == "PiczelChannel" and c.short_name not in ^live_piczel_channels
-
-Repo.update_all(offline_query, set: [is_live: false, updated_at: now])
-
-# Update all channels which are online to reflect online status using
-# changeset functions
-online_query =
-from c in Channel,
-where: c.type == "PicartoChannel" and c.short_name in ^live_picarto_channels,
-or_where: c.type == "PiczelChannel" and c.short_name in ^live_picarto_channels
-
-online_query
-|> Repo.all()
-|> Enum.map(fn
-%{type: "PicartoChannel", short_name: name} = channel ->
-Channel.update_changeset(channel, Map.get(picarto_channels, name, []))
-
-%{type: "PiczelChannel", short_name: name} = channel ->
-Channel.update_changeset(channel, Map.get(piczel_channels, name, []))
-end)
-|> Enum.map(&Repo.update!/1)
+AutomaticUpdater.update_tracked_channels!()
end

@doc """
@@ -103,6 +69,24 @@ defmodule Philomena.Channels do
|> Repo.update()
end

@doc """
Updates a channel's state when it goes live.
## Examples
iex> update_channel_state(channel, %{field: new_value})
{:ok, %Channel{}}
iex> update_channel_state(channel, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_channel_state(%Channel{} = channel, attrs) do
channel
|> Channel.update_changeset(attrs)
|> Repo.update()
end

@doc """
Deletes a Channel.
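
update_tracked_channels!/0 now simply delegates to AutomaticUpdater, and the new update_channel_state/2 gives that module a single public write path. A hedged sketch of a call site (the attrs mirror what AutomaticUpdater merges in; the %Channel{} is assumed loaded elsewhere):

    now = DateTime.utc_now() |> DateTime.truncate(:second)

    {:ok, _channel} =
      Philomena.Channels.update_channel_state(channel, %{
        is_live: true,
        last_live_at: now,
        last_fetched_at: now
      })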
64 changes: 64 additions & 0 deletions lib/philomena/channels/automatic_updater.ex
@@ -0,0 +1,64 @@
defmodule Philomena.Channels.AutomaticUpdater do
@moduledoc """
Automatic update routine for streams.
Calls APIs for each stream provider to remove channels which are no longer online,
and to restore channels which are currently online.
"""

import Ecto.Query, warn: false
alias Philomena.Repo

alias Philomena.Channels
alias Philomena.Channels.Channel
alias Philomena.Channels.PicartoChannel
alias Philomena.Channels.PiczelChannel

@doc """
Updates all the tracked channels for which an update scheme is known.
"""
def update_tracked_channels! do
now = DateTime.utc_now(:second)
Enum.each(providers(), &update_provider(&1, now))
end

defp providers do
[
{"PicartoChannel", PicartoChannel.live_channels()},
{"PiczelChannel", PiczelChannel.live_channels()}
]
end

defp update_provider({provider_name, live_channels}, now) do
channel_names = Map.keys(live_channels)

provider_name
|> update_offline_query(channel_names, now)
|> Repo.update_all([])

provider_name
|> online_query(channel_names)
|> Repo.all()
|> Enum.each(&update_online_channel(&1, live_channels, now))
end

defp update_offline_query(provider_name, channel_names, now) do
from c in Channel,
where: c.type == ^provider_name and c.short_name not in ^channel_names,
update: [set: [is_live: false, updated_at: ^now]]
end

defp online_query(provider_name, channel_names) do
from c in Channel,
where: c.type == ^provider_name and c.short_name in ^channel_names
end

defp update_online_channel(channel, live_channels, now) do
attrs =
live_channels
|> Map.get(channel.short_name, %{})
|> Map.merge(%{last_live_at: now, last_fetched_at: now})

Channels.update_channel_state(channel, attrs)
end
end
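
Two details worth noting: DateTime.utc_now(:second) (available since Elixir 1.15) replaces the utc_now() |> DateTime.truncate(:second) pipeline, and update_offline_query/3 embeds the write in the query itself via update: [set: ...], so the subsequent Repo.update_all/2 receives an empty keyword list. An equivalent formulation (sketch only) keeps the query pure and passes the set clause to update_all directly:

    offline =
      from c in Channel,
        where: c.type == ^provider_name and c.short_name not in ^channel_names

    Repo.update_all(offline, set: [is_live: false, updated_at: now])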
10 changes: 4 additions & 6 deletions lib/philomena/channels/picarto_channel.ex
@@ -1,30 +1,28 @@
defmodule Philomena.Channels.PicartoChannel do
@api_online "https://api.picarto.tv/api/v1/online?adult=true&gaming=true"

-@spec live_channels(DateTime.t()) :: map()
-def live_channels(now) do
+@spec live_channels() :: map()
+def live_channels do
@api_online
|> PhilomenaProxy.Http.get()
|> case do
{:ok, %{body: body, status: 200}} ->
body
|> Jason.decode!()
-|> Map.new(&{&1["name"], fetch(&1, now)})
+|> Map.new(&{&1["name"], fetch(&1)})

_error ->
%{}
end
end

-defp fetch(api, now) do
+defp fetch(api) do
%{
title: api["title"],
is_live: true,
nsfw: api["adult"],
viewers: api["viewers"],
thumbnail_url: api["thumbnails"]["web"],
-last_fetched_at: now,
-last_live_at: now,
description: nil
}
end
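
With the timestamps gone from fetch/1, the HTTP layer only reports what the Picarto API returned; AutomaticUpdater stamps last_live_at and last_fetched_at itself when merging the attrs. A sketch of how the pieces now compose (the channel name is hypothetical):

    now = DateTime.utc_now(:second)

    attrs =
      PicartoChannel.live_channels()              # %{"name" => %{title: ..., is_live: true, ...}}
      |> Map.get("some_streamer", %{})
      |> Map.merge(%{last_live_at: now, last_fetched_at: now})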
