
Commit

fix ci
minhthuc2502 committed Sep 6, 2024
1 parent be9ffd0 commit 89f245f
Showing 3 changed files with 14 additions and 9 deletions.
9 changes: 4 additions & 5 deletions python/ctranslate2/converters/transformers.py
@@ -8,10 +8,11 @@
 
 import numpy as np
 
+import transformers
+
 try:
     import huggingface_hub
     import torch
-    import transformers
 except ImportError:
     pass
 
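The hunk above moves import transformers out of the optional-import guard, so importing the converter module now fails immediately when transformers is not installed, while torch and huggingface_hub remain optional. A minimal sketch of the resulting import pattern (the comments are mine, not from the source):

# Hard dependency: an ImportError now surfaces at module import time.
import transformers

try:
    # Optional dependencies: their absence no longer breaks module import.
    import huggingface_hub
    import torch
except ImportError:
    pass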
@@ -1422,7 +1423,7 @@ def set_decoder(self, spec, module):
 
 
 @register_loader("Gemma2Config")
-class GemmaLoader(ModelLoader):
+class Gemma2Loader(ModelLoader):
     @property
     def architecture_name(self):
         return "Gemma2ForCausalLM"
@@ -1494,9 +1495,7 @@ def set_decoder(self, spec, module):
         self.set_layer_norm(spec.layer_norm, module.norm)
 
         for layer_spec, layer in zip(spec.layer, module.layers):
-            self.set_layer_norm(
-                layer_spec.input_layer_norm, layer.input_layernorm
-            )
+            self.set_layer_norm(layer_spec.input_layer_norm, layer.input_layernorm)
 
             self.set_layer_norm(
                 layer_spec.post_attention_layer_norm, layer.post_attention_layernorm
12 changes: 9 additions & 3 deletions python/ctranslate2/specs/transformer_spec.py
@@ -325,9 +325,15 @@ def __init__(
 
         if pre_post_layer_norm:
             self.input_layer_norm = common_spec.LayerNormSpec(rms_norm=rms_norm)
-            self.post_attention_layer_norm = common_spec.LayerNormSpec(rms_norm=rms_norm)
-            self.pre_feedforward_layer_norm = common_spec.LayerNormSpec(rms_norm=rms_norm)
-            self.post_feedforward_layer_norm = common_spec.LayerNormSpec(rms_norm=rms_norm)
+            self.post_attention_layer_norm = common_spec.LayerNormSpec(
+                rms_norm=rms_norm
+            )
+            self.pre_feedforward_layer_norm = common_spec.LayerNormSpec(
+                rms_norm=rms_norm
+            )
+            self.post_feedforward_layer_norm = common_spec.LayerNormSpec(
+                rms_norm=rms_norm
+            )
 
             delattr(self.self_attention, "layer_norm")
             delattr(self.ffn, "layer_norm")
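This hunk is a formatting-only change (the three constructor calls are wrapped across lines); behavior is unchanged. For context, the four LayerNormSpec fields created under pre_post_layer_norm correspond to normalizing before and after both the attention and feed-forward blocks, Gemma2-style, which is why the per-submodule layer_norm attributes are removed in the same hunk. A rough, runnable sketch of that ordering, under stated assumptions (identity stand-ins for attention and feed-forward; not CTranslate2's runtime code):

import numpy as np

def rms_norm(x, eps=1e-6):
    # Simplified RMS normalization, standing in for LayerNormSpec(rms_norm=True).
    return x / np.sqrt((x * x).mean(-1, keepdims=True) + eps)

self_attention = lambda x: x  # identity stand-in
feed_forward = lambda x: x    # identity stand-in

def gemma2_style_layer(x):
    h = rms_norm(x)            # input_layer_norm
    h = self_attention(h)
    x = x + rms_norm(h)        # post_attention_layer_norm
    h = rms_norm(x)            # pre_feedforward_layer_norm
    h = feed_forward(h)
    x = x + rms_norm(h)        # post_feedforward_layer_norm
    return x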
2 changes: 1 addition & 1 deletion third_party/googletest
Submodule googletest updated 245 files
