codeShare committed on
Commit
4a95bcb
·
verified ·
1 Parent(s): d3c7333

Upload fusion_t2i_CLIP_interrogator.ipynb

Browse files
Google Colab Jupyter Notebooks/fusion_t2i_CLIP_interrogator.ipynb CHANGED
@@ -2935,7 +2935,7 @@
2935
  ]
2936
  }
2937
  },
2938
- "execution_count": 1,
2939
  "outputs": [
2940
  {
2941
  "output_type": "stream",
@@ -3120,8 +3120,8 @@
3120
  "\n",
3121
  "%cd {home_directory + 'fusion-t2i-generator-data/' + 'reference'}\n",
3122
  "references = torch.load('reference_text_and_image_encodings.pt' , weights_only=False)\n",
3123
- "reference = torch.add(reference, C * references[index][0].dequantize())\n",
3124
- "reference = torch.add(reference, (1-C) * references[index][1].dequantize())\n",
3125
  "references = ''\n",
3126
  "# @markdown -----------\n",
3127
  "# @markdown πŸ“βž• Enhance similarity to prompt(s)\n",
@@ -3147,7 +3147,8 @@
3147
  "# @markdown -----------\n",
3148
  "# @markdown ⏩ Skip item(s) containing the word(s)\n",
3149
  "SKIP = 'futa ' # @param {type:'string' , placeholder:'item1 , item2 , ...'}\n",
3150
- "def isBlacklisted(txt):\n",
 
3151
  " if txt.strip().isnumeric(): return True\n",
3152
  " if blacklist.strip() == '': return False\n",
3153
  " for item in list(blacklist.split(',')):\n",
@@ -3155,6 +3156,7 @@
3155
  " if txt.find(item.strip())> -1 : return True\n",
3156
  " #------#\n",
3157
  " return False\n",
 
3158
  "# @markdown -----------\n",
3159
  "# @markdown πŸ” How similar should the results be?\n",
3160
  "list_size = 1000 # @param {type:'number'}\n",
@@ -3198,7 +3200,9 @@
3198
  "#---#\n",
3199
  " output = '{'\n",
3200
  " for _index in range(list_size):\n",
3201
- " output = output + prompts[f'{indices[min(_index+start_at_index,NUM_VOCAB_ITEMS-1)].item()}'] + '|'\n",
 
 
3202
  " #---------#\n",
3203
  " output = (output + '}').replace('|}' , '} ')\n",
3204
  " for iter in range(N):\n",
@@ -3536,93 +3540,12 @@
3536
  "execution_count": null,
3537
  "outputs": []
3538
  },
3539
- {
3540
- "cell_type": "code",
3541
- "source": [
3542
- "ref"
3543
- ],
3544
- "metadata": {
3545
- "id": "J-IUkhBXe_a2",
3546
- "outputId": "a4e1c5b2-9d10-4113-ccf8-43fd25b32749",
3547
- "colab": {
3548
- "base_uri": "https://localhost:8080/"
3549
- }
3550
- },
3551
- "execution_count": 3,
3552
- "outputs": [
3553
- {
3554
- "output_type": "execute_result",
3555
- "data": {
3556
- "text/plain": [
3557
- "tensor([[ 4, 254, 7, 255, 3, 1, 1, 253, 1, 0, 0, 0, 3, 1,\n",
3558
- " 249, 2, 255, 7, 3, 3, 253, 0, 4, 1, 253, 249, 7, 2,\n",
3559
- " 5, 255, 254, 254, 253, 249, 254, 7, 5, 254, 255, 249, 252, 255,\n",
3560
- " 6, 0, 254, 253, 3, 2, 2, 253, 250, 4, 8, 254, 253, 3,\n",
3561
- " 1, 0, 0, 253, 0, 7, 2, 3, 254, 2, 7, 0, 0, 253,\n",
3562
- " 252, 7, 3, 1, 1, 2, 252, 4, 5, 252, 255, 3, 5, 253,\n",
3563
- " 4, 2, 0, 1, 0, 2, 4, 4, 252, 7, 255, 253, 253, 2,\n",
3564
- " 4, 3, 254, 249, 0, 253, 0, 254, 0, 253, 6, 0, 0, 255,\n",
3565
- " 0, 254, 252, 0, 250, 253, 249, 255, 252, 252, 0, 1, 5, 1,\n",
3566
- " 5, 3, 1, 251, 254, 242, 250, 254, 252, 5, 5, 9, 254, 9,\n",
3567
- " 9, 0, 0, 253, 1, 0, 3, 255, 255, 2, 2, 255, 4, 254,\n",
3568
- " 254, 255, 4, 254, 253, 7, 255, 3, 1, 5, 252, 2, 0, 255,\n",
3569
- " 5, 252, 255, 252, 3, 1, 3, 2, 254, 243, 252, 2, 8, 3,\n",
3570
- " 255, 2, 254, 2, 254, 0, 254, 252, 253, 4, 254, 1, 255, 232,\n",
3571
- " 253, 7, 2, 255, 0, 0, 254, 2, 253, 255, 254, 2, 7, 251,\n",
3572
- " 255, 252, 255, 255, 254, 5, 5, 8, 255, 6, 0, 255, 253, 254,\n",
3573
- " 3, 254, 2, 4, 8, 251, 255, 253, 2, 254, 7, 255, 250, 4,\n",
3574
- " 3, 251, 1, 252, 3, 5, 255, 6, 255, 3, 2, 6, 3, 1,\n",
3575
- " 1, 250, 253, 0, 252, 5, 251, 11, 255, 255, 254, 1, 3, 255,\n",
3576
- " 252, 248, 254, 254, 255, 2, 255, 250, 252, 254, 254, 2, 7, 7,\n",
3577
- " 253, 249, 0, 4, 4, 1, 5, 2, 238, 255, 254, 254, 252, 0,\n",
3578
- " 248, 1, 254, 0, 0, 2, 254, 255, 252, 0, 255, 253, 254, 255,\n",
3579
- " 1, 254, 253, 253, 254, 255, 4, 255, 112, 253, 251, 9, 0, 251,\n",
3580
- " 5, 1, 254, 8, 252, 254, 0, 5, 254, 5, 254, 0, 255, 2,\n",
3581
- " 252, 252, 2, 1, 253, 251, 251, 254, 0, 3, 250, 255, 5, 7,\n",
3582
- " 1, 2, 2, 255, 3, 253, 2, 253, 254, 0, 253, 1, 3, 8,\n",
3583
- " 7, 6, 13, 1, 1, 4, 4, 1, 1, 250, 0, 2, 250, 255,\n",
3584
- " 1, 251, 7, 1, 252, 255, 2, 252, 2, 1, 2, 7, 0, 4,\n",
3585
- " 0, 250, 251, 251, 4, 0, 255, 8, 9, 4, 5, 0, 17, 0,\n",
3586
- " 3, 0, 254, 6, 250, 1, 254, 243, 254, 253, 255, 1, 254, 251,\n",
3587
- " 249, 3, 0, 1, 1, 2, 2, 5, 3, 0, 248, 2, 9, 254,\n",
3588
- " 2, 9, 0, 2, 255, 5, 138, 0, 1, 1, 255, 249, 4, 0,\n",
3589
- " 254, 253, 236, 252, 3, 0, 255, 9, 6, 1, 250, 0, 2, 3,\n",
3590
- " 2, 9, 252, 2, 4, 255, 251, 6, 4, 252, 2, 255, 2, 253,\n",
3591
- " 253, 250, 251, 253, 1, 4, 251, 250, 251, 255, 4, 252, 6, 6,\n",
3592
- " 254, 2, 241, 0, 1, 6, 2, 247, 1, 1, 4, 250, 254, 4,\n",
3593
- " 253, 1, 253, 1, 247, 2, 2, 249, 3, 2, 5, 253, 255, 253,\n",
3594
- " 254, 252, 1, 253, 5, 2, 4, 5, 0, 3, 239, 254, 250, 1,\n",
3595
- " 5, 253, 7, 2, 2, 3, 1, 255, 254, 2, 4, 255, 2, 1,\n",
3596
- " 0, 0, 0, 1, 4, 254, 4, 0, 3, 5, 3, 1, 0, 253,\n",
3597
- " 18, 3, 253, 255, 252, 6, 3, 255, 254, 253, 2, 252, 0, 254,\n",
3598
- " 253, 254, 252, 255, 255, 250, 4, 1, 2, 5, 249, 251, 250, 1,\n",
3599
- " 250, 250, 3, 4, 255, 3, 2, 1, 0, 254, 2, 2, 255, 6,\n",
3600
- " 1, 3, 3, 11, 0, 2, 2, 249, 1, 0, 255, 0, 1, 4,\n",
3601
- " 3, 246, 250, 0, 4, 248, 1, 3, 247, 11, 3, 2, 6, 0,\n",
3602
- " 253, 1, 251, 2, 2, 252, 254, 246, 0, 252, 252, 3, 255, 2,\n",
3603
- " 13, 2, 1, 255, 3, 253, 0, 254, 251, 253, 6, 255, 0, 1,\n",
3604
- " 254, 255, 248, 251, 1, 253, 252, 255, 0, 253, 2, 2, 252, 8,\n",
3605
- " 255, 5, 251, 1, 3, 249, 4, 253, 255, 1, 4, 251, 255, 4,\n",
3606
- " 254, 3, 1, 254, 255, 1, 245, 3, 6, 1, 1, 5, 254, 255,\n",
3607
- " 2, 5, 0, 243, 254, 254, 255, 253, 251, 0, 251, 2, 2, 4,\n",
3608
- " 1, 252, 255, 2, 3, 255, 0, 0, 255, 9, 12, 12, 9, 3,\n",
3609
- " 248, 5, 254, 0, 1, 1, 9, 255, 6, 1, 2, 6, 2, 0,\n",
3610
- " 255, 0, 254, 3, 6, 2, 251, 253, 252, 0, 5, 253, 248, 245,\n",
3611
- " 0, 254, 1, 254, 250, 252, 5, 5, 5, 1, 254, 0]],\n",
3612
- " dtype=torch.uint8)"
3613
- ]
3614
- },
3615
- "metadata": {},
3616
- "execution_count": 3
3617
- }
3618
- ]
3619
- },
3620
  {
3621
  "cell_type": "code",
3622
  "source": [
3623
  "\n",
3624
  "# @title \tβš„ New code (work in progress)\n",
3625
- "_ref = 'And I can still taste your skin' # @param {type:'string' , placeholder:'type a single prompt to match'}\n",
3626
  "LIST_SIZE = 1000 # @param {type:'number' , placeholder:'set how large the list should be'}\n",
3627
  "\n",
3628
  "SCALE = 0.0043\n",
@@ -3651,8 +3574,6 @@
3651
  " return 1\n",
3652
  "#----------#\n",
3653
  "\n",
3654
- "\n",
3655
- "\n",
3656
  "inputs = tokenizer(text = _ref.strip(), truncation = True , padding=True, return_tensors=\"pt\")\n",
3657
  "ref = model.get_text_features(**inputs)[0]\n",
3658
  "\n",
@@ -3663,13 +3584,12 @@
3663
  "\n",
3664
  "vocab = load_file(url)\n",
3665
  "\n",
3666
- "\n",
3667
  "#get_most_similiar_items_to(ref , url , LIST_SIZE)"
3668
  ],
3669
  "metadata": {
3670
  "id": "PGyLzCmYqCPg"
3671
  },
3672
- "execution_count": 14,
3673
  "outputs": []
3674
  }
3675
  ]
 
2935
  ]
2936
  }
2937
  },
2938
+ "execution_count": null,
2939
  "outputs": [
2940
  {
2941
  "output_type": "stream",
 
3120
  "\n",
3121
  "%cd {home_directory + 'fusion-t2i-generator-data/' + 'reference'}\n",
3122
  "references = torch.load('reference_text_and_image_encodings.pt' , weights_only=False)\n",
3123
+ "reference = torch.add(reference, prompt_strength * C * references[index][0].dequantize())\n",
3124
+ "reference = torch.add(reference, prompt_strength * (1-C) * references[index][1].dequantize())\n",
3125
  "references = ''\n",
3126
  "# @markdown -----------\n",
3127
  "# @markdown πŸ“βž• Enhance similarity to prompt(s)\n",
 
3147
  "# @markdown -----------\n",
3148
  "# @markdown ⏩ Skip item(s) containing the word(s)\n",
3149
  "SKIP = 'futa ' # @param {type:'string' , placeholder:'item1 , item2 , ...'}\n",
3150
+ "\n",
3151
+ "def isBlacklisted(txt, blacklist):\n",
3152
  " if txt.strip().isnumeric(): return True\n",
3153
  " if blacklist.strip() == '': return False\n",
3154
  " for item in list(blacklist.split(',')):\n",
 
3156
  " if txt.find(item.strip())> -1 : return True\n",
3157
  " #------#\n",
3158
  " return False\n",
3159
+ "\n",
3160
  "# @markdown -----------\n",
3161
  "# @markdown πŸ” How similar should the results be?\n",
3162
  "list_size = 1000 # @param {type:'number'}\n",
 
3200
  "#---#\n",
3201
  " output = '{'\n",
3202
  " for _index in range(list_size):\n",
3203
+ " tmp = prompts[f'{indices[min(_index+start_at_index,NUM_VOCAB_ITEMS-1)].item()}']\n",
3204
+ " if isBlacklisted(tmp , SKIP): continue\n",
3205
+ " output = output + tmp + '|'\n",
3206
  " #---------#\n",
3207
  " output = (output + '}').replace('|}' , '} ')\n",
3208
  " for iter in range(N):\n",
 
3540
  "execution_count": null,
3541
  "outputs": []
3542
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3543
  {
3544
  "cell_type": "code",
3545
  "source": [
3546
  "\n",
3547
  "# @title \tβš„ New code (work in progress)\n",
3548
+ "_ref = '' # @param {type:'string' , placeholder:'type a single prompt to match'}\n",
3549
  "LIST_SIZE = 1000 # @param {type:'number' , placeholder:'set how large the list should be'}\n",
3550
  "\n",
3551
  "SCALE = 0.0043\n",
 
3574
  " return 1\n",
3575
  "#----------#\n",
3576
  "\n",
 
 
3577
  "inputs = tokenizer(text = _ref.strip(), truncation = True , padding=True, return_tensors=\"pt\")\n",
3578
  "ref = model.get_text_features(**inputs)[0]\n",
3579
  "\n",
 
3584
  "\n",
3585
  "vocab = load_file(url)\n",
3586
  "\n",
 
3587
  "#get_most_similiar_items_to(ref , url , LIST_SIZE)"
3588
  ],
3589
  "metadata": {
3590
  "id": "PGyLzCmYqCPg"
3591
  },
3592
+ "execution_count": null,
3593
  "outputs": []
3594
  }
3595
  ]