{
"_id": "621ffdc136468d709f17ced9",
"id": "julien-c/wine-quality",
"modelId": "julien-c/wine-quality",
"author": "julien-c",
"sha": "66d2918cd4449c87e581c21b9b7d24a840ba1279",
"lastModified": "2023-10-06T12:56:32.000Z",
"private": false,
"disabled": false,
"gated": false,
"pipeline_tag": "tabular-classification",
"tags": [
"sklearn",
"joblib",
"tabular-classification",
"dataset:wine-quality",
"dataset:lvwerra/red-wine",
"has_space",
"region:us"
],
"downloads": 113,
"library_name": "sklearn",
"widgetData": [
{
"structuredData": {
"fixed_acidity": [
7.4,
7.8,
10.3
],
"volatile_acidity": [
0.7,
0.88,
0.32
],
"citric_acid": [
0,
0,
0.45
],
"residual_sugar": [
1.9,
2.6,
6.4
],
"chlorides": [
0.076,
0.098,
0.073
],
"free_sulfur_dioxide": [
11,
25,
5
],
"total_sulfur_dioxide": [
34,
67,
13
],
"density": [
0.9978,
0.9968,
0.9976
],
"pH": [
3.51,
3.2,
3.23
],
"sulphates": [
0.56,
0.68,
0.82
],
"alcohol": [
9.4,
9.8,
12.6
]
}
}
],
"likes": 13,
"model-index": null,
"config": {},
"cardData": {
"tags": [
"tabular-classification",
"sklearn"
],
"datasets": [
"wine-quality",
"lvwerra/red-wine"
],
"widget": [
{
"structuredData": {
"fixed_acidity": [
7.4,
7.8,
10.3
],
"volatile_acidity": [
0.7,
0.88,
0.32
],
"citric_acid": [
0,
0,
0.45
],
"residual_sugar": [
1.9,
2.6,
6.4
],
"chlorides": [
0.076,
0.098,
0.073
],
"free_sulfur_dioxide": [
11,
25,
5
],
"total_sulfur_dioxide": [
34,
67,
13
],
"density": [
0.9978,
0.9968,
0.9976
],
"pH": [
3.51,
3.2,
3.23
],
"sulphates": [
0.56,
0.68,
0.82
],
"alcohol": [
9.4,
9.8,
12.6
]
}
}
]
},
"spaces": [
"microsoft/HuggingGPT",
"taesiri/HuggingGPT-Lite",
"ccarr0807/HuggingGPT",
"theholycityweb/HuggingGPT",
"contluForse/HuggingGPT",
"Alfasign/HuggingGPT-Lite",
"saurshaz/HuggingGPT",
"zeajose/julien-c-wine-quality",
"awacke1/Tabular-Classifier-julien-c-wine-quality",
"keaneu/HuggingGPT",
"viscosity/HuggingGPT",
"Mcdof/HuggingGPT",
"BMukhtar/BMA",
"chrisW6825/HuggingGPT",
"Shenziqian/HuggingGPT",
"lokutus/HuggingGPT",
"mimiqiao/HuggingGPT",
"tsgbalakarthik/HuggingGPT",
"wowochkin/HuggingGPT",
"Msp/HuggingGPT",
"ryan12439/HuggingGPTpub",
"FANCHIYU/HuggingGPT",
"Betacuckgpt/HuggingGPT",
"cashqin/HuggingGPT",
"felixfriday/MICROSOFTT_JARVIS_HuggingGPT",
"Meffordh/HuggingGPT",
"lzqfree/HuggingGPT",
"bountyfuljr/HuggingGPTplaypublic",
"mearjunsha/HuggingGPT",
"turbowed/HuggingGPT",
"Chokyounghoon/HuggingGPT",
"lollo21/Will-GPT",
"Pfs2021Funny/HuggingGPT",
"irritablebro/HuggingGPT",
"MagKoz/HuggingGPT",
"zhangdream/HuggingGPT",
"calliber/HuggingGPT",
"Pitak/HuggingGPT",
"gaocegege/HuggingGPT",
"apgarmd/jarvis",
"apgarmd/jarvis2",
"mukulnag/HuggingGPT1",
"lugifudun/HuggingGPT",
"leadmaister/HuggingGPT",
"pors/HuggingGPT",
"vs4vijay/HuggingGPT",
"mckeeboards/HuggingGPT",
"mastere00/JarvisMeetsProfessor",
"passthebutter/HuggingGPT",
"manu1435/HuggingGPT",
"rafaelcalleja/HuggingGPT",
"NaamanSaif/HuggingGPT",
"CollaalloC/HuggingGPT",
"dwolfe66/HuggingGPT",
"xian-sheng/HuggingGPT",
"Aygtljl518866/HuggingGPT",
"Hemi1403/HuggingGPT",
"trhacknon/HuggingGPT",
"Vito99/HuggingGPT-Lite",
"EinfachOlder/HuggingGPT-Lite",
"innovativeillusions/HuggingGPT",
"ylavie/HuggingGPT3",
"ylavie/HuggingGPT-Lite",
"CCYAO/HuggingGPT",
"dcams/HuggingGPT",
"cndavy/HuggingGPT",
"ZackBradshaw/omni_bot",
"ertiaM/julien-c-wine-quality"
],
"siblings": [
{
"rfilename": ".gitattributes"
},
{
"rfilename": "README.md"
},
{
"rfilename": "config.yml"
},
{
"rfilename": "sklearn_model.joblib"
},
{
"rfilename": "winequality-red.csv"
}
],
"createdAt": "2022-03-02T23:29:05.000Z"
}
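
A payload of this shape is what the public Hub models endpoint (`https://huggingface.co/api/models/{repo_id}`) returns for a repository. The snippet below is a minimal sketch of how a reader might reproduce it, assuming the `requests` library and the official `huggingface_hub` client; it is not part of the record above, and the printed values are only the ones visible in the example.

```python
# Sketch: fetch the same model metadata two ways (assumes `requests` and
# `huggingface_hub` are installed; repo id taken from the "id" field above).
import requests
from huggingface_hub import model_info

# 1) Raw HTTP call to the public models endpoint.
resp = requests.get("https://huggingface.co/api/models/julien-c/wine-quality")
resp.raise_for_status()
data = resp.json()
print(data["pipeline_tag"])   # "tabular-classification"
print(data["library_name"])   # "sklearn"

# 2) Same metadata through the huggingface_hub client, as a ModelInfo object.
info = model_info("julien-c/wine-quality")
print(info.tags)              # includes "sklearn", "joblib", ...
print([s.rfilename for s in info.siblings])  # README.md, sklearn_model.joblib, ...
```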