{ "cells": [ { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import dotenv\n", "dotenv.load_dotenv()" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "DatasetDict({\n", " train: Dataset({\n", " features: ['func', 'target', 'cwe', 'project', 'commit_id', 'hash', 'size', 'message'],\n", " num_rows: 264393\n", " })\n", " validation: Dataset({\n", " features: ['func', 'target', 'cwe', 'project', 'commit_id', 'hash', 'size', 'message'],\n", " num_rows: 33049\n", " })\n", " test: Dataset({\n", " features: ['func', 'target', 'cwe', 'project', 'commit_id', 'hash', 'size', 'message'],\n", " num_rows: 33050\n", " })\n", "})\n", "{'func': 'static boolean ReadICCProfile(j_decompress_ptr jpeg_info)\\n{\\n char\\n magick[12];\\n\\n ErrorManager\\n *error_manager;\\n\\n ExceptionInfo\\n *exception;\\n\\n Image\\n *image;\\n\\n MagickBooleanType\\n status;\\n\\n register ssize_t\\n i;\\n\\n register unsigned char\\n *p;\\n\\n size_t\\n length;\\n\\n StringInfo\\n *icc_profile,\\n *profile;\\n\\n /*\\n Read color profile.\\n */\\n length=(size_t) ((size_t) GetCharacter(jpeg_info) << 8);\\n length+=(size_t) GetCharacter(jpeg_info);\\n length-=2;\\n if (length <= 14)\\n {\\n while (length-- > 0)\\n if (GetCharacter(jpeg_info) == EOF)\\n break;\\n return(TRUE);\\n }\\n for (i=0; i < 12; i++)\\n magick[i]=(char) GetCharacter(jpeg_info);\\n if (LocaleCompare(magick,ICC_PROFILE) != 0)\\n {\\n /*\\n Not a ICC profile, return.\\n */\\n for (i=0; i < (ssize_t) (length-12); i++)\\n if (GetCharacter(jpeg_info) == EOF)\\n break;\\n return(TRUE);\\n }\\n (void) GetCharacter(jpeg_info); /* id */\\n (void) GetCharacter(jpeg_info); /* markers */\\n length-=14;\\n error_manager=(ErrorManager *) jpeg_info->client_data;\\n exception=error_manager->exception;\\n image=error_manager->image;\\n profile=BlobToStringInfo((const void *) NULL,length);\\n if (profile == (StringInfo *) NULL)\\n {\\n (void) ThrowMagickException(exception,GetMagickModule(),\\n ResourceLimitError,\"MemoryAllocationFailed\",\"`%s\\'\",image->filename);\\n return(FALSE);\\n }\\n error_manager->profile=profile;\\n p=GetStringInfoDatum(profile);\\n for (i=0; i < (ssize_t) length; i++)\\n {\\n int\\n c;\\n\\n c=GetCharacter(jpeg_info);\\n if (c == EOF)\\n break;\\n *p++=(unsigned char) c;\\n }\\n if (i != (ssize_t) length)\\n {\\n profile=DestroyStringInfo(profile);\\n (void) ThrowMagickException(exception,GetMagickModule(),\\n CorruptImageError,\"InsufficientImageDataInFile\",\"`%s\\'\",\\n image->filename);\\n return(FALSE);\\n }\\n error_manager->profile=NULL;\\n icc_profile=(StringInfo *) GetImageProfile(image,\"icc\");\\n if (icc_profile != (StringInfo *) NULL)\\n {\\n ConcatenateStringInfo(icc_profile,profile);\\n profile=DestroyStringInfo(profile);\\n }\\n else\\n {\\n status=SetImageProfile(image,\"icc\",profile,exception);\\n profile=DestroyStringInfo(profile);\\n if (status == MagickFalse)\\n {\\n (void) ThrowMagickException(exception,GetMagickModule(),\\n ResourceLimitError,\"MemoryAllocationFailed\",\"`%s\\'\",image->filename);\\n return(FALSE);\\n }\\n }\\n if (image->debug != MagickFalse)\\n (void) LogMagickEvent(CoderEvent,GetMagickModule(),\\n \"Profile: ICC, %.20g bytes\",(double) length);\\n return(TRUE);\\n}', 'target': 1, 'cwe': ['CWE-416'], 'project': 'ImageMagick', 'commit_id': 
'39f226a9c137f547e12afde972eeba7551124493', 'hash': 1.623740923374004e+38, 'size': 111, 'message': 'https://github.com/ImageMagick/ImageMagick/issues/1641'}\n" ] } ], "source": [ "from datasets import load_dataset, DatasetDict\n", "import os\n", "\n", "# Load the raw DiverseVul JSONL release; it arrives as a single train split\n", "dataset = load_dataset(\"json\", data_files=\"diversevul_20230702.jsonl\")\n", "\n", "# Split into train/valid/test (80/10/10): hold out 20%, then split the holdout in half for validation and test\n", "train_valid = dataset[\"train\"].train_test_split(test_size=0.2, seed=0, train_indices_cache_file_name=\"train.indices\")\n", "train_data, valid_data = train_valid[\"train\"], train_valid[\"test\"]\n", "valid_test = valid_data.train_test_split(test_size=0.5, seed=0, train_indices_cache_file_name=\"valid.indices\", test_indices_cache_file_name=\"test.indices\")\n", "valid_data, test_data = valid_test[\"train\"], valid_test[\"test\"]\n", "dataset = DatasetDict({\n", " \"train\": train_data,\n", " \"validation\": valid_data,\n", " \"test\": test_data,\n", "})\n", "\n", "print(dataset)\n", "print(dataset[\"train\"][0])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "dataset.save_to_disk(\"diversevul\")" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Pushing dataset shards to the dataset hub: 100%|██████████| 2/2 [00:00<00:00, 543.87it/s]\n", "Creating parquet from Arrow format: 100%|██████████| 34/34 [00:03<00:00, 8.88ba/s]\n", "Pushing dataset shards to the dataset hub: 100%|██████████| 1/1 [00:12<00:00, 12.34s/it]\n", "Creating parquet from Arrow format: 100%|██████████| 34/34 [00:03<00:00, 9.98ba/s]\n", "Pushing dataset shards to the dataset hub: 100%|██████████| 1/1 [00:11<00:00, 11.55s/it]\n", "Downloading metadata: 100%|██████████| 263/263 [00:00<00:00, 495kB/s]\n" ] } ], "source": [ "dataset.push_to_hub(\"benjis/diversevul\", token=os.getenv(\"HUGGINGFACEHUB_API_TOKEN\"))" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.12" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }