This repository was archived by the owner on Dec 2, 2022. It is now read-only.

Add AnimeGG website #95

Open
wants to merge 28 commits into main
Commits (28)
3d07a3e
There's something I guess
Kapilarny Apr 22, 2022
42b3cfb
commit
Kapilarny Apr 22, 2022
54a58df
Preparing for adding animepisode and desu-online
Kapilarny Apr 22, 2022
bef7935
Add requirements.txt
artrixdotdev Apr 22, 2022
1b4c583
Update requirements.txt
artrixdotdev Apr 22, 2022
188ec1b
Add Animepisode to main
artrixdotdev Apr 22, 2022
2b3231e
Add Animepisode scraper
artrixdotdev Apr 22, 2022
9b3f028
Added desu-online.pl support for ScraperEngine
Kapilarny Apr 22, 2022
97573ac
Add new websites to WebsiteScraperService.cs
Kapilarny Apr 22, 2022
f398706
Fix anime matching on Animepisode since they use peculiar names for t…
artrixdotdev Apr 22, 2022
72dae7a
Added loop for all video qualities
Kapilarny Apr 23, 2022
a7a7384
Added search for desuonline
Kapilarny Apr 23, 2022
563b61b
Added Full Support for desu-online.pl :D
Kapilarny Apr 23, 2022
f373ffc
Add Animegg resource
artrixdotdev Apr 24, 2022
d08b16c
Optimization for desuonline, and some bug fixes
Kapilarny Apr 25, 2022
25aaca5
Add animegg support
artrixdotdev Apr 28, 2022
775cc9c
Merge branch 'main' into animegg
artrixdotdev Apr 28, 2022
1088a31
Merge pull request #1 from Kapilarny/animegg
artrixdotdev Apr 28, 2022
d61c5de
AnimeGG fully scraped
Kapilarny Apr 29, 2022
4c143ee
Added full support for AnimeGG
Kapilarny Apr 29, 2022
2e56535
Fix some merge conflicts
Kapilarny Apr 29, 2022
7d33986
Merge branch 'main'
Kapilarny Apr 29, 2022
87da999
Merge
Kapilarny Apr 29, 2022
7759c98
Merge branch 'AniAPI-Team-main'
Kapilarny Apr 29, 2022
41a04b7
Delete localizations.1.1.5.json
Kapilarny Apr 29, 2022
2e489a0
Some merge conflicts
Kapilarny Apr 29, 2022
3703d6b
Merge conflict stuff
Kapilarny Apr 29, 2022
87922e4
a lil fix
Kapilarny Apr 29, 2022
4 changes: 3 additions & 1 deletion ScraperEngine/main.py
@@ -1,5 +1,6 @@
import falcon
import falcon.asgi
from resources.animegg import AnimeggResource
from resources.animeworld import AnimeworldResource

from resources.dreamsub import DreamsubResource
@@ -15,5 +16,6 @@
AnimeworldResource(app)
GogoanimeResource(app)
DesuonlineResource(app)
AnimeggResource(app)

AniplaylistResource(app)
AniplaylistResource(app)
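
For local testing, the Falcon ASGI app built in ScraperEngine/main.py can be served with any ASGI server. A minimal sketch, assuming uvicorn is the chosen server (the PR's requirements.txt contents are not shown here, so that choice is an assumption):

# run_scraper.py - hypothetical local-testing helper, not part of this PR.
# Assumes uvicorn is installed and ScraperEngine/main.py exposes the Falcon app as "app".
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)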
63 changes: 63 additions & 0 deletions ScraperEngine/resources/animegg.py
@@ -0,0 +1,63 @@
import falcon
import aiohttp
from utils.session import execute_proxied_request

from falcon import uri
from typing import List
from bs4 import BeautifulSoup
from interfaces.resource import ScraperResource
from models.episode import Episode
from models.matching import Matching


class AnimeggResource(ScraperResource):

    def __init__(self, app: falcon.App) -> None:
        # Use the same name as the entry in MongoDB's websites collection
        super().__init__(app, "animegg")

    async def get_possible_matchings(self, res: falcon.Response, title: str) -> List[Matching]:
        matchings = []
        url = f"{self.base_url}/search/?q={uri.encode(title)}"
        try:
            page = await execute_proxied_request(self, url)
            # Search results use the "mse" class
            results = page.find_all(class_="mse")
            for result in results:
                url = result.get("href")
                title = result.select_one(".searchre > .media-body > .first > h2").text
                matchings.append(Matching(title, url))
        except Exception as e:
            print(str(e))
            raise
        return matchings

    async def get_episode(self, res: falcon.Response, path: str, number: int) -> List[Episode]:
        episodes = []
        series_name = uri.encode(path.split("/")[-1].replace(" ", "-").replace(",", ""))
        video_url = f"{self.base_url}/{series_name}-episode-{number}"
        try:
            page = await execute_proxied_request(self, video_url)
            # Every available mirror is listed as an <a> inside the #videos list
            links = page.find("ul", id="videos").find_next("li").find_all("a")
            for video in links:
                embed_id = video["data-id"]

                # data-version tells dubbed and subbed mirrors apart
                vid_type = "dubbed" if video["data-version"] == "dubbed" else "subbed"

                # Mirrors flagged with the "HD" badge are 1080p, everything else is treated as 480p
                quality_text = video.select_one("span.btn-hd").text
                quality = 1080 if quality_text == "HD" else 480

                # The direct video URL is exposed through the embed page's og:video meta tag
                embed_url = f"{self.base_url}/embed/{embed_id}"
                page = await execute_proxied_request(self, embed_url)
                video_path = page.select_one("meta[property='og:video']")["content"]
                video_url = f"{self.base_url}{video_path}"
                episodes.append(Episode(f"Episode {number}", f"{embed_url}#{vid_type}", video_url, quality, "mp4"))

        except Exception as e:
            print(str(e))
            raise

        return episodes
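
The dub/sub and quality detection in get_episode relies on two pieces of AnimeGG markup: each mirror link carries a data-version attribute, plus an HD badge in a span.btn-hd child, and the #dubbed/#subbed fragment appended to the embed URL is what AnimeggWebsite.cs later inspects. A self-contained sketch with invented sample markup (not real AnimeGG output) illustrates the parsing rules:

# Hypothetical sanity check for the selectors used in AnimeggResource.get_episode.
# The HTML below is made-up sample markup, not fetched from the live site.
from bs4 import BeautifulSoup

sample = """
<ul id="videos"><li>
  <a data-id="123" data-version="dubbed"><span class="btn-hd">HD</span></a>
  <a data-id="456" data-version="subbed"><span class="btn-hd">SD</span></a>
</li></ul>
"""

page = BeautifulSoup(sample, "html.parser")
links = page.find("ul", id="videos").find_next("li").find_all("a")
for video in links:
    vid_type = "dubbed" if video["data-version"] == "dubbed" else "subbed"
    quality = 1080 if video.select_one("span.btn-hd").text == "HD" else 480
    print(video["data-id"], vid_type, quality)
# Prints: 123 dubbed 1080, then 456 subbed 480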
3 changes: 1 addition & 2 deletions ScraperEngine/resources/desuonline.py
@@ -94,8 +94,7 @@ async def get_episode(self, res: falcon.Response, path: str, number: int) -> List[Episode]:
episodes = []

url = f"{self.base_url}{path}-odcinek-{number}"
print(url)


try:
# This works, but there's a faster method
# But in case there are any bugs with the current approach, you can use this
32 changes: 32 additions & 0 deletions SyncService/Models/Websites/AnimeggWebsite.cs
@@ -0,0 +1,32 @@
using Commons;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SyncService.Models.Websites
{
    public class AnimeggWebsite : IWebsite
    {
        public AnimeggWebsite(Website website) : base(website)
        {
        }

        public override bool AnalyzeMatching(Anime anime, AnimeMatching matching, string sourceTitle)
        {
            // Episode paths scraped from AnimeGG carry a "#dubbed"/"#subbed" fragment
            if (matching.EpisodePath.Contains("#dubbed"))
            {
                matching.isDub = true;
            }
            return base.AnalyzeMatching(anime, matching, sourceTitle);
        }

        public override Dictionary<string, string> GetVideoProxyHeaders(AnimeMatching matching, Dictionary<string, string> values = null)
        {
            // Use the episode page, minus the dub/sub fragment, as the referer for the video proxy
            return new Dictionary<string, string>
            {
                { "referer", matching.EpisodePath.Replace("#dubbed", "").Replace("#subbed", "") }
            };
        }
    }
}
3 changes: 3 additions & 0 deletions SyncService/Services/WebsiteScraperService.cs
@@ -84,6 +84,9 @@ public override async Task Start(CancellationToken cancellationToken)
case "gogoanime":
iWeb = new GogoanimeWebsite(website);
break;
case "animegg":
iWeb = new AnimeggWebsite(website);
break;
case "desuonline":
iWeb = new DesuonlineWebsite(website);
break;
30 changes: 15 additions & 15 deletions WebAPI/Resources/localizations.1_1_5.json
@@ -1,16 +1,16 @@
{
"localizations": [
{
"i18n": "en",
"label": "English"
},
{
"i18n": "it",
"label": "Italian"
},
{
"i18n": "pl",
"label": "Polish"
}
]
}
"localizations": [
{
"i18n": "en",
"label": "English"
},
{
"i18n": "it",
"label": "Italian"
},
{
"i18n": "pl",
"label": "Polish"
}
]
}
2 changes: 1 addition & 1 deletion WebAPI/WebAPI.csproj
@@ -25,8 +25,8 @@

<ItemGroup>
<EmbeddedResource Include="Resources\genres.1_0.json" />
<EmbeddedResource Include="Resources\localizations.1_1_5.json" />
<EmbeddedResource Include="Resources\localizations.1_0.json" />
<EmbeddedResource Include="Resources\localizations.1_1_5.json" />
</ItemGroup>

<ItemGroup>