Update README.md
README.md CHANGED
@@ -13,4 +13,28 @@ tags:

# 1. Usage
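The added usage example runs the model through the transformers `text-generation` pipeline with a chat-formatted prompt: the system message describes the database schema, and the user message carries the natural-language question to be translated into SQL.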
```python
# Use a pipeline as a high-level helper
from transformers import pipeline
import torch

model_id = "xbrain/text2sql-8b-instruct-v1"

# Chat-format prompt: the system message describes the schema,
# the user message asks the question to convert into SQL.
messages = [
    {"role": "system",
     "content": "I want you to act as a SQL terminal in front of an example database, you need only to return the sql command to me.Below is an instruction that describes a task, Write a response that appropriately completes the request.\n\"\n##Instruction:\n database contains tables such as table_name_30. Table table_name_30 has columns such as nfl_team, draft_year."},
    {"role": "user",
     "content": "###Input:\nIn 1978 what is the NFL team?\n\n###Response:"},
]

pipe_msg = pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

outputs = pipe_msg(
    messages,
    max_new_tokens=256,
)
# The last element of the generated chat history is the assistant's reply.
print(outputs[0]["generated_text"][-1])
```
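With chat-style input, the pipeline returns the full conversation, so `outputs[0]["generated_text"][-1]` is the assistant message dict rather than a plain string. A minimal sketch of pulling out just the SQL text, assuming a recent transformers release with chat-format pipeline output; the commented query is illustrative, not a captured model response:

```python
# The final chat turn is the assistant's reply; its "content" field holds the generated SQL.
sql = outputs[0]["generated_text"][-1]["content"]
print(sql)
# Illustrative output for the example prompt above (not a recorded model response):
# SELECT nfl_team FROM table_name_30 WHERE draft_year = 1978
```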
