zR committed
Commit
3291d4e
1 Parent(s): 90e901a
Files changed (3)
  1. config.json +1 -1
  2. generation_config.json +1 -1
  3. modeling_cogvlm.py +3 -1
config.json CHANGED
@@ -21,7 +21,7 @@
   "template_version": "base",
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.41.0",
+  "transformers_version": "4.43.1",
   "use_cache": true,
   "vision_config": {
     "dropout_prob": 0.0,
generation_config.json CHANGED
@@ -7,5 +7,5 @@
   "max_length": 2048,
   "top_p": 0.1,
   "top_k": 1,
-  "transformers_version": "4.41.0"
+  "transformers_version": "4.43.1"
 }
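
Aside (not part of the commit): with "top_k": 1 only the single highest-probability token survives filtering, so "top_p": 0.1 has no further effect and sampling degenerates to greedy decoding. A minimal usage sketch for loading these defaults; the model id below is a placeholder:

from transformers import GenerationConfig

# Placeholder repo id; substitute the actual model repository.
gen_config = GenerationConfig.from_pretrained("THUDM/cogvlm2-video-llama3-base")
print(gen_config.max_length, gen_config.top_p, gen_config.top_k)  # 2048 0.1 1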
modeling_cogvlm.py CHANGED
@@ -780,9 +780,11 @@ class CogVLMVideoForCausalLM(CogVLMPreTrainedModel):
         standardize_cache_format: bool = False,
     ) -> Dict[str, Any]:
         # update past_key_values
-        model_kwargs["past_key_values"] = self._extract_past_from_model_output(
+        cache_name, cache = self._extract_past_from_model_output(
             outputs, standardize_cache_format=standardize_cache_format
         )
+        model_kwargs[cache_name] = cache
+
         if getattr(outputs, "state", None) is not None:
             model_kwargs["state"] = outputs.state
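
Note (not part of the commit): in transformers 4.41, GenerationMixin._extract_past_from_model_output returned the cache object itself, while as of 4.43 it returns a (cache_name, cache) tuple; this hunk, together with the transformers_version bumps above, tracks that API change. A minimal compatibility sketch, assuming one wants a single code path across both conventions; update_cache_kwargs is a hypothetical helper, not part of the repo:

from typing import Any, Dict

def update_cache_kwargs(model, outputs, model_kwargs: Dict[str, Any]) -> Dict[str, Any]:
    # _extract_past_from_model_output returns the cache alone (< 4.43)
    # or a (cache_name, cache) tuple (>= 4.43).
    extracted = model._extract_past_from_model_output(
        outputs, standardize_cache_format=False
    )
    if isinstance(extracted, tuple):
        cache_name, cache = extracted
    else:
        cache_name, cache = "past_key_values", extracted
    model_kwargs[cache_name] = cache
    return model_kwargs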