-
Notifications
You must be signed in to change notification settings - Fork 28
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: add support for text data & tokenization
- Loading branch information
Showing
39 changed files
with
468 additions
and
188 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
11 changes: 11 additions & 0 deletions
11
discojs/discojs-core/src/dataset/data/preprocessing/base.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
import { tf, Task } from '../../..' | ||
import { ImagePreprocessing } from './image_preprocessing' | ||
import { TabularPreprocessing } from './tabular_preprocessing' | ||
import { TextPreprocessing } from './text_preprocessing' | ||
|
||
/** Union of all preprocessing-step identifiers across the supported data modalities. */
export type Preprocessing = ImagePreprocessing | TextPreprocessing | TabularPreprocessing

/**
 * A single, self-contained preprocessing step.
 * `type` identifies the step (used e.g. to order steps in a pipeline);
 * `apply` transforms one dataset entry, possibly reading parameters from the task.
 */
export interface PreprocessingFunction {
  type: Preprocessing
  apply: (x: tf.TensorContainer, task: Task) => tf.TensorContainer
}
44 changes: 44 additions & 0 deletions
44
discojs/discojs-core/src/dataset/data/preprocessing/image_preprocessing.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
import { Task, tf } from '../../..' | ||
import { PreprocessingFunction } from './base' | ||
|
||
import { List } from 'immutable' | ||
|
||
/** Preprocessing steps available for image data. Enum order defines pipeline order. */
export enum ImagePreprocessing {
  Resize,
  Normalize
}
|
||
/** Shape of one image dataset entry: `xs` is the image tensor, `ys` the (optional) label. */
interface ImageEntry extends tf.TensorContainerObject {
  xs: tf.Tensor3D | tf.Tensor4D
  ys: tf.Tensor1D | number | undefined
}
|
||
const resize: PreprocessingFunction = { | ||
type: ImagePreprocessing.Resize, | ||
apply: (entry: tf.TensorContainer, task: Task): tf.TensorContainer => { | ||
const { xs, ys } = entry as ImageEntry | ||
const params = task.trainingInformation | ||
return { | ||
xs: params.IMAGE_W !== undefined && params.IMAGE_H !== undefined | ||
? xs.resizeBilinear([params.IMAGE_H, params.IMAGE_W]) | ||
: xs, | ||
ys | ||
} | ||
} | ||
} | ||
|
||
const normalize: PreprocessingFunction = { | ||
type: ImagePreprocessing.Normalize, | ||
apply: (entry: tf.TensorContainer, task: Task): tf.TensorContainer => { | ||
const { xs, ys } = entry as ImageEntry | ||
return { | ||
xs: xs.div(tf.scalar(255)), | ||
ys | ||
} | ||
} | ||
} | ||
|
||
// All image preprocessing steps, sorted by their enum value so the pipeline
// always applies them in declaration order (Resize before Normalize).
export const AVAILABLE_PREPROCESSING = List.of(
  resize,
  normalize
).sortBy((e) => e.type)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
export { Preprocessing, PreprocessingFunction } from './base' | ||
export { AVAILABLE_PREPROCESSING as IMAGE_PREPROCESSING, ImagePreprocessing } from './image_preprocessing' | ||
export { AVAILABLE_PREPROCESSING as TABULAR_PREPROCESSING, TabularPreprocessing } from './tabular_preprocessing' | ||
export { AVAILABLE_PREPROCESSING as TEXT_PREPROCESSING, TextPreprocessing } from './text_preprocessing' |
9 changes: 9 additions & 0 deletions
9
discojs/discojs-core/src/dataset/data/preprocessing/tabular_preprocessing.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
import { List } from 'immutable' | ||
import { PreprocessingFunction } from './base' | ||
|
||
/** Preprocessing steps available for tabular data. */
export enum TabularPreprocessing {
  Sanitize,
  Normalize
}

// No tabular steps are implemented yet — Sanitize and Normalize are declared
// above but the available list is deliberately empty for now.
export const AVAILABLE_PREPROCESSING = List<PreprocessingFunction>()
67 changes: 67 additions & 0 deletions
67
discojs/discojs-core/src/dataset/data/preprocessing/text_preprocessing.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
import { Task, tf } from '../../..' | ||
import { PreprocessingFunction } from './base' | ||
|
||
import GPT3Tokenizer from 'gpt3-tokenizer' | ||
import { List } from 'immutable' | ||
|
||
/** Preprocessing steps available for text data. Enum order defines pipeline order. */
export enum TextPreprocessing {
  Tokenize,
  Padding
}
|
||
/** Raw text entry before tokenization: strings in, numeric labels alongside. */
interface TextEntry extends tf.TensorContainerObject {
  xs: string[]
  ys: number[]
}

/** Entry after tokenization: both inputs and labels are 1-D tensors. */
interface TokenizedEntry extends tf.TensorContainerObject {
  xs: tf.Tensor1D
  ys: tf.Tensor1D
}
|
||
const gpt3Tokenizer = new GPT3Tokenizer({ type: 'gpt3' }) | ||
|
||
const padding: PreprocessingFunction = { | ||
type: TextPreprocessing.Padding, | ||
apply: (x: tf.TensorContainer, task: Task) => { | ||
const { xs, ys } = x as TokenizedEntry | ||
// TODO: add to task definition | ||
const maxLength = 64 | ||
if (maxLength === undefined) { | ||
return { xs, ys } | ||
} | ||
return { | ||
xs: xs | ||
.pad([[0, Math.max(0, maxLength - xs.size)]]) | ||
.slice([0], [maxLength]), | ||
ys | ||
} | ||
} | ||
} | ||
|
||
const tokenize: PreprocessingFunction = { | ||
type: TextPreprocessing.Tokenize, | ||
apply: (x: tf.TensorContainer, task: Task) => { | ||
const { xs, ys } = x as TextEntry | ||
const params = task.trainingInformation | ||
// TODO: add to task definition | ||
const tokenizer = (params as unknown as any).tokenizer | ||
|
||
let tokenized: number[] | ||
if (tokenizer === undefined) { | ||
tokenized = gpt3Tokenizer.encode(xs[0]).bpe | ||
} else { | ||
throw new Error('tokenizer not implemented') | ||
} | ||
|
||
return { | ||
xs: tf.tensor(tokenized), | ||
ys: tf.tensor(ys) | ||
} | ||
} | ||
} | ||
|
||
export const AVAILABLE_PREPROCESSING = List.of( | ||
tokenize, | ||
padding | ||
) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
import { Task } from '../..' | ||
import { Dataset } from '../dataset' | ||
import { Data } from './data' | ||
import { TEXT_PREPROCESSING } from './preprocessing' | ||
|
||
/** Text modality wrapper around a dataset, exposing the text preprocessing steps. */
export class TextData extends Data {
  public readonly availablePreprocessing = TEXT_PREPROCESSING

  /**
   * Async factory, mirroring the other Data subclasses' init signatures.
   * @param dataset the underlying dataset
   * @param task the task whose parameters drive preprocessing
   * @param size optional number of entries
   */
  static async init (
    dataset: Dataset,
    task: Task,
    size?: number
  ): Promise<TextData> {
    return new TextData(dataset, task, size)
  }

  // Used by the base class to rebuild an instance of the same modality.
  protected create (dataset: Dataset, task: Task, size?: number): TextData {
    return new TextData(dataset, task, size)
  }
}
Oops, something went wrong.