fix asurascans, fix resetscans
Yentis committed Jul 27, 2024
1 parent 10499bf commit becd1a7
Showing 8 changed files with 151 additions and 28 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "quasar-manga-reader",
"version": "2.23.13",
"version": "2.23.14",
"description": "Manga reader that scrapes manga sites for updates",
"productName": "Manga Reader",
"author": "Yentis#5218",
2 changes: 1 addition & 1 deletion src-cordova/config.xml
@@ -1,5 +1,5 @@
<?xml version='1.0' encoding='utf-8'?>
<widget id="org.cordova.manga.reader" version="2.23.13" xmlns="http://www.w3.org/ns/widgets" xmlns:cdv="http://cordova.apache.org/ns/1.0">
<widget id="org.cordova.manga.reader" version="2.23.14" xmlns="http://www.w3.org/ns/widgets" xmlns:cdv="http://cordova.apache.org/ns/1.0">
<name>Manga Reader</name>
<description>Manga reader that scrapes manga sites for updates</description>
<author email="[email protected]" href="http://cordova.io">
129 changes: 129 additions & 0 deletions src/classes/sites/asurascans.ts
@@ -0,0 +1,129 @@
import { BaseData, BaseSite } from './baseSite'
import { SiteType } from 'src/enums/siteEnum'
import PQueue from 'p-queue'
import moment from 'moment'
import { Manga } from 'src/classes/manga'
import HttpRequest from 'src/interfaces/httpRequest'
import { requestHandler } from 'src/services/requestService'
import { ContentType } from 'src/enums/contentTypeEnum'
import { parseHtmlFromString, parseNum, titleContainsQuery } from '../../utils/siteUtils'
import qs from 'qs'

class AsuraData extends BaseData {
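// Holds the chapter list container element, used as a fallback source for the chapter number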
chapterList?: Element
}

export class AsuraScans extends BaseSite {
siteType: SiteType

constructor() {
super()

this.siteType = SiteType.AsuraScans
this.requestQueue = new PQueue({ interval: 2000, intervalCap: 1 })
}

protected getChapterNum(data: AsuraData): number {
const chapterNum = parseNum(data.chapterNum?.textContent?.trim().split(' ')[1])
if (chapterNum !== 0) return chapterNum

const chapters = data.chapterList?.querySelectorAll('div h3:nth-child(1)')
if (!chapters) return 0

// Fall back to the first entry with a parsable chapter number, offset by its index to get the latest chapter number
for (const [index, chapter] of chapters.entries()) {
const curChapterNum = parseNum(chapter.textContent?.trim().split(' ')[1])
if (curChapterNum === 0) continue

return curChapterNum + index
}

return 0
}

protected getChapterDate(data: BaseData): string {
const chapterDate = moment(data.chapterDate?.textContent, 'MMMM DD YYYY')
if (chapterDate.isValid()) {
return chapterDate.fromNow()
} else {
return ''
}
}

protected getChapterUrl(data: BaseData): string {
return `${this.getUrl()}/series/${super.getChapterUrl(data)}`
}

protected getImage(data: BaseData): string {
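// Meta tags expose the cover URL via 'content'; the poster <img> fallback uses 'src'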
return data.image?.getAttribute('content') ?? data.image?.getAttribute('src') ?? ''
}

protected async readUrlImpl(url: string): Promise<Error | Manga> {
const request: HttpRequest = { method: 'GET', url }
const response = await requestHandler.sendRequest(request)

const doc = await parseHtmlFromString(response.data)
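// The chapter list container is matched by its Tailwind class; its first inner div is treated as the latest chapter entry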
const chapterList = doc.querySelectorAll('.scrollbar-thumb-themecolor')[0]
const chapterItem = chapterList?.querySelectorAll('div')[0]

const data = new AsuraData(url)
data.chapter = chapterItem?.querySelectorAll('a')[0]
data.chapterUrl = data.chapter
data.chapterNum = data.chapter
data.chapterDate = chapterItem?.querySelectorAll('h3')[1]
data.chapterList = chapterList
data.title = doc.querySelectorAll('.text-xl')[0]

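// Prefer the og:image meta tag for the cover, falling back to twitter:image or the poster <img>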
const imageElements = doc.querySelectorAll('meta[property="og:image"]')
let image: Element | undefined
if (imageElements.length === 0) {
image = doc.querySelectorAll('meta[name="twitter:image"]')[0] ?? doc.querySelectorAll('img[alt="poster"]')[0]
} else image = imageElements[0]
data.image = image

return this.buildManga(data)
}

protected async searchImpl(query: string): Promise<Error | Manga[]> {
const queryString = qs.stringify({ name: query.replace(/’/g, "'") })
const request: HttpRequest = {
method: 'GET',
url: `${this.getUrl()}/series?${queryString}`,
headers: { 'Content-Type': `${ContentType.URLENCODED}; charset=UTF-8` },
}

const response = await requestHandler.sendRequest(request)
const doc = await parseHtmlFromString(response.data)
const mangaList: Manga[] = []

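// Each search result is an anchor in the results grid; its second .font-bold element holds the title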
doc.querySelectorAll('.grid-cols-2 a').forEach((elem) => {
const url = `${this.getUrl()}/${elem.getAttribute('href') ?? ''}`

const manga = new Manga('', this.siteType)
const titleElem = elem.querySelectorAll('.font-bold')[1]
manga.title = titleElem?.textContent?.trim() || ''

const image = elem.querySelectorAll('img')[0]
const imageUrl = image?.getAttribute('src') || ''
manga.image = imageUrl

manga.chapter = titleElem?.nextElementSibling?.textContent?.trim() || 'Unknown'
manga.url = url ?? ''

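// Result titles may be truncated with '...', so drop truncated words before matching against the query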
const modifiedTitle = manga.title
.split(' ')
.filter((word) => !word.endsWith('...'))
.join(' ')

if (titleContainsQuery(modifiedTitle, query)) {
mangaList.push(manga)
}
})

return mangaList
}

getTestUrl(): string {
return `${this.getUrl()}/series/mookhyang-the-origin-105d9ca4`
}
}
7 changes: 0 additions & 7 deletions src/classes/sites/madara.ts
@@ -1,6 +1,5 @@
import { BaseData, BaseSite } from './baseSite'
import { SiteType } from 'src/enums/siteEnum'
import PQueue from 'p-queue'
import moment from 'moment'
import { Manga } from 'src/classes/manga'
import HttpRequest from 'src/interfaces/httpRequest'
@@ -36,10 +35,6 @@ export class Madara extends BaseSite {
constructor(siteType: SiteType) {
super()
this.siteType = siteType

if (siteType === SiteType.AsuraScans) {
this.requestQueue = new PQueue({ interval: 2000, intervalCap: 1 })
}
}

protected getChapterNum(data: MadaraData): number {
@@ -182,8 +177,6 @@ export class Madara extends BaseSite {

getTestUrl(): string {
switch (this.siteType) {
case SiteType.AsuraScans:
return `${this.getUrl()}/?p=36483`
case SiteType.FlameComics:
return `${this.getUrl()}/series/the-way-of-the-househusband/`
case SiteType.LuminousScans:
8 changes: 4 additions & 4 deletions src/enums/siteEnum.ts
@@ -10,12 +10,12 @@ export enum SiteType
LSComic = 'lscomic.com',
HiperDEX = 'hiperdex.com',
ReaperScans = 'reaperscans.com',
AsuraScans = 'asuratoon.com',
AsuraScans = 'asuracomic.net',
Mangago = 'mangago.me',
ZeroScans = 'zscans.com',
Batoto = 'bato.to',
FlameComics = 'flamecomics.com',
ResetScans = 'resetscan.com',
ResetScans = 'reset-scans.co',
Cubari = 'cubari.moe',
LuminousScans = 'luminous-scans.com',
Tapas = 'tapas.io',
@@ -34,13 +34,13 @@ export enum SiteName {
'lscomic.com' = 'LS Comic',
'hiperdex.com' = 'HiperDEX',
'reaperscans.com' = 'Reaper Scans',
'asuratoon.com' = 'Asura Scans',
'asuracomic.net' = 'Asura Scans',
'mangago.me' = 'Mangago',
'kitsu.io' = 'Kitsu',
'zscans.com' = 'Zero Scans',
'bato.to' = 'Batoto',
'flamecomics.com' = 'Flame Comics',
'resetscan.com' = 'Reset Scans',
'reset-scans.co' = 'Reset Scans',
'cubari.moe' = 'Cubari / Guya',
'luminous-scans.com' = 'Luminous Scans',
'tapas.io' = 'Tapas',
3 changes: 2 additions & 1 deletion src/services/siteService.ts
@@ -21,6 +21,7 @@ import { Comikey } from 'src/classes/sites/comikey'
import { ReaperScans } from 'src/classes/sites/reaperscans'
import { TappyToon } from 'src/classes/sites/tappytoon'
import { ScyllaScans } from 'src/classes/sites/scyllascans'
import { AsuraScans } from 'src/classes/sites/asurascans'

const globalRequestQueue = new PQueue({
interval: 1000,
@@ -38,7 +39,7 @@ const siteMap = new Map<string, BaseSite>([
[SiteType.LSComic, new WordPress(SiteType.LSComic)],
[SiteType.HiperDEX, new WordPress(SiteType.HiperDEX)],
[SiteType.ReaperScans, new ReaperScans()],
[SiteType.AsuraScans, new Madara(SiteType.AsuraScans)],
[SiteType.AsuraScans, new AsuraScans()],
[SiteType.Mangago, new Mangago()],
[SiteType.ZeroScans, new ZeroScans()],
[SiteType.Batoto, new Batoto()],
22 changes: 10 additions & 12 deletions src/services/test/asurascans.ts
@@ -12,33 +12,31 @@ export async function testAsuraScans(): Promise<void> {
if (!site) throw Error('Site not found')

await readUrl(site)
await readUrlAdvanced()
await readUrlAdvanced(site)
await search(site)
}

async function readUrl(site: BaseSite): Promise<void> {
const manga = await getMangaInfo(site.getTestUrl(), SITE_TYPE)
const desired = new Manga(site.getTestUrl(), SITE_TYPE)
desired.chapter = 'Chapter 47'
desired.image =
'https://img.asuracomics.com/unsafe/fit-in/720x936/https://asuratoon.com/wp-content/uploads/2021/02/ponytail_boy_.png'
desired.image = 'https://gg.asuracomic.net/storage/media/114/conversions/367e3d17-optimized.webp'
desired.title = 'Mookhyang The Origin'
desired.chapterUrl = 'https://asuratoon.com/4631981187-mookhyang-the-origin-chapter-47/'
desired.chapterUrl = 'https://asuracomic.net/series/mookhyang-the-origin-105d9ca4/chapter/47'
desired.chapterNum = 47
desired.chapterDate = '3 years ago'

mangaEqual(manga, desired)
}

async function readUrlAdvanced(): Promise<void> {
const testUrl = 'https://asuratoon.com/?p=36093'
async function readUrlAdvanced(site: BaseSite): Promise<void> {
const testUrl = `${site.getUrl()}/series/solo-bug-player-e800d15b`
const manga = await getMangaInfo(testUrl, SITE_TYPE)
const desired = new Manga(testUrl, SITE_TYPE)
desired.chapter = 'Chapter 88'
desired.image =
'https://img.asuracomics.com/unsafe/fit-in/720x936/https://asuratoon.com/wp-content/uploads/2021/02/cover4.gif'
desired.image = 'https://gg.asuracomic.net/storage/media/245/01J3BAR5EFJJSB84FC5GDZYSW7.webp'
desired.title = 'Solo Bug Player'
desired.chapterUrl = 'https://asuratoon.com/4631981187-solo-bug-player-chapter-88/'
desired.chapterUrl = 'https://asuracomic.net/series/solo-bug-player-e800d15b/chapter/88'
desired.chapterNum = 88

mangaEqual(manga, desired)
@@ -47,9 +45,9 @@ async function search(site: BaseSite): Promise<void> {
async function search(site: BaseSite): Promise<void> {
const results = await searchManga(QUERY, SITE_TYPE)
const desired = new Manga(site.getTestUrl(), SITE_TYPE)
desired.image = 'https://asuratoon.com/wp-content/uploads/2021/02/ponytail_boy_-222x300.png'
desired.chapter = '47'
desired.url = 'https://asuratoon.com/?p=36483'
desired.image = 'https://gg.asuracomic.net/storage/media/114/conversions/367e3d17-thumb-small.webp'
desired.chapter = 'Chapter 45'
desired.url = 'https://asuracomic.net/series/mookhyang-the-origin-105d9ca4'

return searchValid(results, desired, QUERY)
}
6 changes: 4 additions & 2 deletions src/utils/siteUtils.ts
@@ -18,6 +18,7 @@ export const siteAliases = [
{ url: 'www.asurascans.com', site: SiteType.AsuraScans },
{ url: 'asurascans.com', site: SiteType.AsuraScans },
{ url: 'asura.gg', site: SiteType.AsuraScans },
{ url: 'asuratoon.com', site: SiteType.AsuraScans },
{ url: 'asura.nacm.xyz', site: SiteType.AsuraScans },
{ url: 'leviatanscans.com', site: SiteType.LSComic },
{ url: 'en.leviatanscans.com', site: SiteType.LSComic },
@@ -26,6 +27,7 @@ export const siteAliases = [
{ url: 'reset-scans.com', site: SiteType.ResetScans },
{ url: 'reset-scans.us', site: SiteType.ResetScans },
{ url: 'reset-scans.xyz', site: SiteType.ResetScans },
{ url: 'resetscan.com', site: SiteType.ResetScans },
{ url: 'zeroscans.com', site: SiteType.ZeroScans },
{ url: 'scyllascans.org', site: SiteType.ScyllaScans },
{ url: 'luminousscans.com', site: SiteType.LuminousScans },
@@ -59,8 +61,8 @@ export function parseHtmlFromString(
export function titleContainsQuery(query: string, title?: string): boolean {
if (title === undefined) return false

query = query.replace('’', "'")
title = title.replace('’', "'")
query = query.replace(/’/g, "'")
title = title.replace(/’/g, "'")
const querySplit = query.toLowerCase().split(' ')

return querySplit.every((word) => title?.toLowerCase().includes(word))