윤정환

Code and dataset upload

#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from pathlib import Path
import matplotlib.pyplot as plt
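# Collaborative-filtering recommender: encode user and contents IDs as integer
# indices, learn an embedding for each, and predict a normalized rating from the
# dot product of the two embeddings plus per-user and per-contents biases.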
df = pd.read_csv("dataset.csv")
user_ids = df["userid"].unique().tolist()
user2user_encoded = {x: i for i, x in enumerate(user_ids)}
userencoded2user = {i: x for i, x in enumerate(user_ids)}
contents_ids = df["contentsid"].unique().tolist()
contents2contents_encoded = {x: i for i, x in enumerate(contents_ids)}
contents_encoded2contents = {i: x for i, x in enumerate(contents_ids)}
df["user"] = df["userid"].map(user2user_encoded)
df["contents"] = df["contentsid"].map(contents2contents_encoded)
num_users = len(user2user_encoded)
num_contents = len(contents_encoded2contents)
df["rating"] = df["rating"].values.astype(np.float32)
# min and max ratings will be used to normalize the ratings later
min_rating = 0.5
max_rating = 5.0
print(
    "Number of users: {}, Number of Contents: {}, Min rating: {}, Max rating: {}".format(
        num_users, num_contents, min_rating, max_rating
    )
)
df = df.sample(frac=1, random_state=42)
x = df[["user", "contents"]].values
# Normalize the targets between 0 and 1. Makes it easy to train.
y = df["rating"].apply(lambda x: (x - min_rating) / (max_rating - min_rating)).values
# Assuming training on 90% of the data and validating on 10%.
train_indices = int(0.9 * df.shape[0])
x_train, x_val, y_train, y_val = (
    x[:train_indices],
    x[train_indices:],
    y[:train_indices],
    y[train_indices:],
)
EMBEDDING_SIZE = 50
class RecommenderNet(keras.Model):
    def __init__(self, num_users, num_contents, embedding_size, **kwargs):
        super(RecommenderNet, self).__init__(**kwargs)
        self.num_users = num_users
        self.num_contents = num_contents
        self.embedding_size = embedding_size
        self.user_embedding = layers.Embedding(
            num_users,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=keras.regularizers.l2(1e-6),
        )
        self.user_bias = layers.Embedding(num_users, 1)
        self.contents_embedding = layers.Embedding(
            num_contents,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=keras.regularizers.l2(1e-6),
        )
        self.contents_bias = layers.Embedding(num_contents, 1)

    def call(self, inputs):
        user_vector = self.user_embedding(inputs[:, 0])
        user_bias = self.user_bias(inputs[:, 0])
        contents_vector = self.contents_embedding(inputs[:, 1])
        contents_bias = self.contents_bias(inputs[:, 1])
        # Note: tensordot with axes=2 contracts over both the batch and embedding
        # axes, so this yields a single scalar shared by the whole batch rather
        # than a per-sample dot product.
        dot_user_contents = tf.tensordot(user_vector, contents_vector, 2)
        # Add all the components (including bias)
        x = dot_user_contents + user_bias + contents_bias
        # The sigmoid activation forces the rating to between 0 and 1
        return tf.nn.sigmoid(x)
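# A possible per-sample alternative (not what this script uses): replace the
# tensordot contraction above with
#     dot_user_contents = tf.reduce_sum(user_vector * contents_vector, axis=1, keepdims=True)
# so that each (user, contents) pair gets its own dot product.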
model = RecommenderNet(num_users, num_contents, EMBEDDING_SIZE)
model.compile(
    optimizer='sgd',
    loss='mse',
    metrics=[tf.keras.metrics.MeanSquaredError()],
)
history = model.fit(
    x=x_train,
    y=y_train,
    batch_size=2,
    epochs=20,
    verbose=1,
    validation_data=(x_val, y_val),
)
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.title("model loss")
plt.ylabel("loss")
plt.xlabel("epoch")
plt.legend(["train", "val"], loc="upper left")
plt.show()
# Evaluate on the held-out test set, reusing the encodings built from the
# training data so the user/contents indices line up with the trained embeddings.
test_df = pd.read_csv("dataset_test.csv")
test_df["user"] = test_df["userid"].map(user2user_encoded)
test_df["contents"] = test_df["contentsid"].map(contents2contents_encoded)
test_df["rating"] = test_df["rating"].values.astype(np.float32)
x_test = test_df[["user", "contents"]].values
y_test = test_df["rating"].apply(lambda x: (x - min_rating) / (max_rating - min_rating)).values
result = model.evaluate(x_test, y_test)
print(result)
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from pathlib import Path
import matplotlib.pyplot as plt
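# Same matrix-factorization recommender as above, applied to user/task ratings:
# trains on x_train.csv / y_train.csv, predicts ratings for x_test.csv, and
# writes de-normalized predictions to y_pred.csv for RMSE scoring.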
df_x = pd.read_csv("x_train.csv")
df_y = pd.read_csv("y_train.csv")
df = pd.concat([df_x, df_y], axis=1)
user_ids = df["userid"].unique().tolist()
user2user_encoded = {x: i for i, x in enumerate(user_ids)}
userencoded2user = {i: x for i, x in enumerate(user_ids)}
task_ids = df["taskid"].unique().tolist()
task2task_encoded = {x: i for i, x in enumerate(task_ids)}
task_encoded2task = {i: x for i, x in enumerate(task_ids)}
df["user"] = df["userid"].map(user2user_encoded)
df["task"] = df["taskid"].map(task2task_encoded)
num_users = len(user2user_encoded)
num_task = len(task_encoded2task)
df["rating"] = df["rating"].values.astype(np.float32)
# min and max ratings will be used to normalize the ratings later
MIN_RATING = 0.5
MAX_RATING = 5.0
print(
    "Number of users: {}, Number of tasks: {}, Min rating: {}, Max rating: {}".format(
        num_users, num_task, MIN_RATING, MAX_RATING
    )
)
df = df.sample(frac=1, random_state=42)
x = df[["user", "task"]].values
# Normalize the targets between 0 and 1. Makes it easy to train.
y = df["rating"].apply(lambda x: (x - MIN_RATING) / (MAX_RATING - MIN_RATING)).values
# Assuming training on 90% of the data and validating on 10%.
train_indices = int(0.9 * df.shape[0])
x_train, x_val, y_train, y_val = (
    x[:train_indices],
    x[train_indices:],
    y[:train_indices],
    y[train_indices:],
)
EMBEDDING_SIZE = 128
class RecommenderNet(keras.Model):
    def __init__(self, num_users, num_task, embedding_size, **kwargs):
        super(RecommenderNet, self).__init__(**kwargs)
        self.num_users = num_users
        self.num_task = num_task
        self.embedding_size = embedding_size
        self.user_embedding = layers.Embedding(
            num_users,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=keras.regularizers.l2(1e-6),
        )
        self.user_bias = layers.Embedding(num_users, 1)
        self.task_embedding = layers.Embedding(
            num_task,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=keras.regularizers.l2(1e-6),
        )
        self.task_bias = layers.Embedding(num_task, 1)

    def call(self, inputs):
        user_vector = self.user_embedding(inputs[:, 0])
        user_bias = self.user_bias(inputs[:, 0])
        task_vector = self.task_embedding(inputs[:, 1])
        task_bias = self.task_bias(inputs[:, 1])
        dot_user_task = tf.tensordot(user_vector, task_vector, 2)
        # Add all the components (including bias)
        x = dot_user_task + user_bias + task_bias
        # The sigmoid activation forces the rating to between 0 and 1
        return tf.nn.sigmoid(x)
model = RecommenderNet(num_users, num_task, EMBEDDING_SIZE)
model.compile(
    optimizer='adam',
    loss='mse',
    metrics=[tf.keras.metrics.MeanSquaredError()],
)
history = model.fit(
    x=x_train,
    y=y_train,
    batch_size=8,
    epochs=300,
    verbose=1,
    validation_data=(x_val, y_val),
)
df_x_test = pd.read_csv('x_test.csv')
df_x_test["user"] = df_x_test["userid"].map(user2user_encoded)
df_x_test["task"] = df_x_test["taskid"].map(task2task_encoded)
x_test = df_x_test[["user", "task"]].values
y_pred = model.predict(x_test)
df_y_pred = pd.DataFrame(y_pred, columns=['rating'])
# De-normalize the sigmoid outputs back to the original 0.5-5.0 rating scale
df_y_pred["rating"] = df_y_pred["rating"].apply(lambda x: x * (MAX_RATING - MIN_RATING) + MIN_RATING)
df_y_pred.to_csv('y_pred.csv', sep=',', columns=['rating'], index=False)
# Evaluate: compare predictions against the ground-truth ratings using RMSE
import pandas as pd
from sklearn.metrics import mean_squared_error

gt = pd.read_csv('y_test.csv', header=0)
pr = pd.read_csv('y_pred.csv', header=0)
gt = gt.to_numpy().astype(float).reshape(-1)
pr = pr.to_numpy().astype(float).reshape(-1)
# squared=False returns the root-mean-squared error
score = mean_squared_error(gt, pr, squared=False)
print("score:", score)
userid,contentsid,rating
1,T000036,5
1,T000040,0.5
1,T000041,0.5
1,T000042,0.5
1,T000044,5
1,T000045,0.5
1,T000046,0.5
1,T000047,0.5
1,T000048,5
1,T000049,0.5
1,T000050,0.5
1,T000051,0.5
1,T000052,0.5
1,T000053,0.5
1,T000054,0.5
1,T000056,0.5
1,T000057,5
1,T000058,5
1,T000059,0.5
1,T000060,5
1,T000061,0.5
1,T000062,0.5
1,T000078,0.5
1,T000077,0.5
1,T000076,0.5
1,T000075,0.5
1,T000074,0.5
1,T000073,0.5
1,T000071,0.5
1,T000070,0.5
1,T000069,0.5
1,T000063,0.5
1,T000065,5
1,T000066,5
1,T000067,0.5
1,T000068,0.5
1,T001631,0.5
1,T001629,0.5
1,T001628,0.5
1,T000312,0.5
1,T000311,0.5
1,T000310,0.5
1,T000309,0.5
1,T000307,0.5
1,T000306,0.5
1,T000305,0.5
1,T000304,0.5
1,T000303,0.5
1,T000302,0.5
1,T000301,0.5
1,T000299,0.5
1,T000298,0.5
1,T000297,0.5
1,T000295,0.5
1,T000294,0.5
1,T000292,0.5
1,T000291,0.5
1,T000289,0.5
1,T000288,0.5
1,T000286,0.5
1,T000212,0.5
1,T001617,0.5
1,T001615,0.5
1,T001614,0.5
1,T001612,0.5
1,T001611,0.5
1,T001610,0.5
1,T001609,0.5
1,T001608,0.5
1,T001607,0.5
1,T001606,0.5
1,T001604,0.5
1,T001603,0.5
1,T001602,0.5
1,T001600,0.5
1,T000287,0.5
1,T001921,0.5
1,T001920,5
1,T001918,0.5
1,T001917,0.5
1,T001916,0.5
1,T001915,0.5
1,T001914,5
1,T001911,0.5
1,T001942,0.5
1,T001947,0.5
1,T001943,0.5
1,T001944,0.5
1,T001945,0.5
1,T001956,0.5
2,T000036,5
2,T000040,0.5
2,T000041,0.5
2,T000042,0.5
2,T000043,0.5
2,T000044,5
2,T000045,5
2,T000047,0.5
2,T000048,0.5
2,T000049,0.5
2,T000050,0.5
2,T000052,0.5
2,T000053,5
2,T000054,0.5
2,T000055,0.5
2,T000056,5
2,T000057,0.5
2,T000058,0.5
2,T000059,0.5
2,T000060,0.5
2,T000061,0.5
2,T000062,0.5
2,T000078,0.5
2,T000077,0.5
2,T000076,0.5
2,T000075,0.5
2,T000073,0.5
2,T000072,0.5
2,T000071,0.5
2,T000070,0.5
2,T000069,0.5
2,T000063,5
2,T000064,5
2,T000065,0.5
2,T000066,0.5
2,T000067,0.5
2,T000068,0.5
2,T001631,0.5
2,T001630,0.5
2,T001629,0.5
2,T001628,0.5
2,T000312,0.5
2,T000311,0.5
2,T000310,0.5
2,T000309,0.5
2,T000306,0.5
2,T000305,0.5
2,T000304,0.5
2,T000303,5
2,T000302,0.5
2,T000301,0.5
2,T000298,0.5
2,T000297,0.5
2,T000295,5
2,T000294,0.5
2,T000293,0.5
2,T000292,0.5
2,T000289,0.5
2,T000288,0.5
2,T000286,0.5
2,T000212,5
2,T001617,0.5
2,T001616,0.5
2,T001615,0.5
2,T001614,0.5
2,T001612,0.5
2,T001611,0.5
2,T001610,0.5
2,T001609,0.5
2,T001608,0.5
2,T001606,0.5
2,T001604,0.5
2,T001603,0.5
2,T001602,5
2,T001601,0.5
2,T001600,5
2,T000287,0.5
2,T001921,0.5
2,T001919,0.5
2,T001918,0.5
2,T001917,0.5
2,T001915,0.5
2,T001914,0.5
2,T001911,5
2,T001942,0.5
2,T001947,0.5
2,T001944,5
2,T001945,0.5
2,T001946,5
2,T001956,0.5
3,T000040,0.5
3,T000041,5
3,T000042,0.5
3,T000043,5
3,T000044,0.5
3,T000045,0.5
3,T000046,5
3,T000047,0.5
3,T000048,5
3,T000050,5
3,T000051,0.5
3,T000052,0.5
3,T000054,5
3,T000055,0.5
3,T000056,0.5
3,T000057,0.5
3,T000058,0.5
3,T000059,5
3,T000060,0.5
3,T000062,0.5
3,T000078,0.5
3,T000077,0.5
3,T000076,0.5
3,T000075,0.5
3,T000074,0.5
3,T000072,0.5
3,T000071,0.5
3,T000070,0.5
3,T000069,0.5
3,T000063,0.5
3,T000064,0.5
3,T000065,0.5
3,T000066,0.5
3,T000067,5
3,T000068,0.5
3,T001631,5
3,T001630,5
3,T001629,5
3,T000312,5
3,T000311,0.5
3,T000310,0.5
3,T000309,5
3,T000308,0.5
3,T000307,0.5
3,T000306,0.5
3,T000305,0.5
3,T000304,0.5
3,T000303,0.5
3,T000301,0.5
3,T000299,0.5
3,T000298,0.5
3,T000297,0.5
3,T000295,0.5
3,T000294,0.5
3,T000293,0.5
3,T000292,0.5
3,T000291,0.5
3,T000289,0.5
3,T000288,0.5
3,T000286,5
3,T001617,0.5
3,T001615,0.5
3,T001614,0.5
3,T001613,0.5
3,T001612,5
3,T001611,0.5
3,T001610,5
3,T001609,5
3,T001608,0.5
3,T001607,0.5
3,T001604,5
3,T001603,0.5
3,T001602,0.5
3,T001601,5
3,T001600,0.5
3,T000287,0.5
3,T001921,0.5
3,T001919,0.5
3,T001918,5
3,T001917,0.5
3,T001916,5
3,T001914,0.5
3,T001911,0.5
3,T001942,0.5
3,T001947,0.5
3,T001943,0.5
3,T001944,5
3,T001945,0.5
3,T001946,5
3,T001956,0.5
4,T001942,0.5
4,T001943,0.5
4,T001944,0.5
4,T001945,0.5
4,T001946,0.5
4,T001956,0.5
4,T001920,5
4,T001919,5
4,T001918,5
4,T001917,0.5
4,T001916,0.5
4,T001915,0.5
4,T001914,0.5
4,T001911,0.5
4,T001616,0.5
4,T001615,0.5
4,T001614,0.5
4,T001613,0.5
4,T001612,0.5
4,T001611,0.5
4,T001610,0.5
4,T001609,0.5
4,T001608,0.5
4,T001607,0.5
4,T001604,0.5
4,T001603,0.5
4,T001602,0.5
4,T001601,0.5
4,T001600,0.5
4,T000287,0.5
4,T000036,0.5
4,T000041,0.5
4,T000042,0.5
4,T000043,0.5
4,T000044,0.5
4,T000046,0.5
4,T000047,0.5
4,T000048,0.5
4,T000049,5
4,T000050,0.5
4,T000051,0.5
4,T000052,0.5
4,T000053,0.5
4,T000054,0.5
4,T000055,0.5
4,T000056,0.5
4,T000057,0.5
4,T000058,0.5
4,T000059,0.5
4,T000061,5
4,T000062,5
4,T000078,0.5
4,T000076,0.5
4,T000075,0.5
4,T000074,0.5
4,T000073,0.5
4,T000072,0.5
4,T000071,0.5
4,T000070,0.5
4,T000069,0.5
4,T000063,0.5
4,T000064,0.5
4,T000065,0.5
4,T000066,0.5
4,T000067,0.5
4,T001631,0.5
4,T001630,0.5
4,T001629,0.5
4,T001628,0.5
4,T000312,0.5
4,T000311,0.5
4,T000310,0.5
4,T000309,0.5
4,T000308,0.5
4,T000307,0.5
4,T000306,0.5
4,T000305,0.5
4,T000304,0.5
4,T000303,0.5
4,T000301,0.5
4,T000299,0.5
4,T000298,0.5
4,T000297,0.5
4,T000295,0.5
4,T000294,0.5
4,T000292,0.5
4,T000291,0.5
4,T000286,0.5
4,T000212,0.5
5,T001942,0.5
5,T001947,5
5,T001943,5
5,T001944,0.5
5,T001945,5
5,T001946,5
5,T001921,0.5
5,T001920,5
5,T001919,5
5,T001918,5
5,T001917,0.5
5,T001916,0.5
5,T001914,5
5,T001911,5
5,T001617,5
5,T001616,0.5
5,T001615,5
5,T001614,0.5
5,T001613,5
5,T001612,5
5,T001610,5
5,T001609,0.5
5,T001608,0.5
5,T001607,0.5
5,T001606,0.5
5,T001603,0.5
5,T001602,0.5
5,T001601,0.5
5,T001600,0.5
5,T000287,0.5
5,T000036,0.5
5,T000040,5
5,T000041,0.5
5,T000042,0.5
5,T000043,5
5,T000044,0.5
5,T000045,5
5,T000047,0.5
5,T000048,0.5
5,T000049,5
5,T000050,5
5,T000051,0.5
5,T000052,0.5
5,T000053,5
5,T000054,5
5,T000055,0.5
5,T000057,0.5
5,T000058,0.5
5,T000059,5
5,T000060,0.5
5,T000061,0.5
5,T000062,5
5,T000078,0.5
5,T000077,0.5
5,T000076,0.5
5,T000075,5
5,T000074,5
5,T000072,0.5
5,T000071,0.5
5,T000070,0.5
5,T000069,0.5
5,T000063,0.5
5,T000064,0.5
5,T000066,0.5
5,T000067,0.5
5,T000068,0.5
5,T001631,5
5,T001629,0.5
5,T001628,0.5
5,T000312,0.5
5,T000311,0.5
5,T000310,0.5
5,T000308,0.5
5,T000307,0.5
5,T000306,0.5
5,T000305,0.5
5,T000304,0.5
5,T000303,5
5,T000302,0.5
5,T000301,0.5
5,T000298,0.5
5,T000297,0.5
5,T000295,0.5
5,T000293,0.5
5,T000292,0.5
5,T000291,0.5
5,T000288,5
5,T000286,0.5
5,T000212,0.5
6,T001942,5
6,T001947,0.5
6,T001944,5
6,T001945,5
6,T001946,5
6,T001956,0.5
6,T001921,5
6,T001920,5
6,T001919,0.5
6,T001918,0.5
6,T001917,5
6,T001916,5
6,T001915,0.5
6,T001914,5
6,T001617,5
6,T001616,5
6,T001615,5
6,T001614,0.5
6,T001613,5
6,T001612,5
6,T001611,5
6,T001610,5
6,T001609,5
6,T001608,5
6,T001607,0.5
6,T001606,0.5
6,T001604,5
6,T001603,0.5
6,T001602,0.5
6,T001601,5
6,T001600,5
6,T000287,0.5
6,T000040,5
6,T000041,5
6,T000042,0.5
6,T000043,5
6,T000044,5
6,T000045,5
6,T000046,0.5
6,T000047,5
6,T000048,5
6,T000049,5
6,T000051,0.5
6,T000052,5
6,T000053,5
6,T000054,5
6,T000055,5
6,T000057,0.5
6,T000058,5
6,T000060,5
6,T000061,5
6,T000062,5
6,T000078,0.5
6,T000077,0.5
6,T000076,0.5
6,T000075,5
6,T000073,0.5
6,T000070,0.5
6,T000069,0.5
6,T000063,5
6,T000064,5
6,T000065,5
6,T000066,0.5
6,T000067,0.5
6,T000068,0.5
6,T001631,5
6,T001630,0.5
6,T001629,5
6,T001628,5
6,T000312,5
6,T000311,0.5
6,T000310,0.5
6,T000309,5
6,T000308,5
6,T000307,5
6,T000306,0.5
6,T000305,5
6,T000304,5
6,T000303,0.5
6,T000302,5
6,T000301,0.5
6,T000299,0.5
6,T000298,5
6,T000297,5
6,T000295,5
6,T000294,0.5
6,T000291,0.5
6,T000288,5
6,T000286,5
7,T000036,5
7,T000040,0.5
7,T000041,5
7,T000042,5
7,T000043,5
7,T000044,0.5
7,T000045,5
7,T000046,5
7,T000047,5
7,T000048,0.5
7,T000049,5
7,T000050,5
7,T000051,0.5
7,T000052,5
7,T000055,0.5
7,T000056,5
7,T000057,0.5
7,T000058,0.5
7,T000059,5
7,T000061,5
7,T000062,0.5
7,T000077,5
7,T000076,5
7,T000075,0.5
7,T000074,5
7,T000073,0.5
7,T000072,5
7,T000070,5
7,T000069,5
7,T000063,5
7,T000064,5
7,T000065,5
7,T000066,0.5
7,T000067,5
7,T000068,0.5
7,T001631,5
7,T001630,0.5
7,T001629,5
7,T001628,5
7,T000312,0.5
7,T000311,5
7,T000310,0.5
7,T000309,0.5
7,T000308,0.5
7,T000307,5
7,T000306,5
7,T000305,5
7,T000304,5
7,T000303,5
7,T000302,5
7,T000301,5
7,T000299,0.5
7,T000297,0.5
7,T000295,0.5
7,T000294,0.5
7,T000293,0.5
7,T000292,5
7,T000291,0.5
7,T000289,5
7,T000286,0.5
7,T000212,0.5
7,T001617,0.5
7,T001616,5
7,T001615,0.5
7,T001614,5
7,T001613,5
7,T001612,0.5
7,T001611,0.5
7,T001610,0.5
7,T001609,5
7,T001607,0.5
7,T001604,5
7,T001603,0.5
7,T001602,0.5
7,T001601,0.5
7,T001600,5
7,T000287,5
7,T001921,5
7,T001920,5
7,T001919,5
7,T001918,0.5
7,T001916,0.5
7,T001911,5
7,T001942,5
7,T001947,0.5
7,T001943,0.5
7,T001944,0.5
7,T001945,5
7,T001946,5
7,T001956,0.5
8,T000036,5
8,T000041,0.5
8,T000042,5
8,T000043,0.5
8,T000045,0.5
8,T000046,0.5
8,T000047,0.5
8,T000048,5
8,T000049,5
8,T000050,0.5
8,T000051,5
8,T000052,5
8,T000054,5
8,T000055,0.5
8,T000056,5
8,T000057,5
8,T000058,5
8,T000060,5
8,T000062,0.5
8,T000078,0.5
8,T000077,0.5
8,T000076,0.5
8,T000075,0.5
8,T000074,0.5
8,T000073,0.5
8,T000071,0.5
8,T000070,0.5
8,T000069,0.5
8,T000063,0.5
8,T000064,5
8,T000065,5
8,T000066,0.5
8,T000067,5
8,T000068,0.5
8,T001630,0.5
8,T001629,0.5
8,T001628,5
8,T000312,0.5
8,T000311,0.5
8,T000310,5
8,T000309,0.5
8,T000308,0.5
8,T000307,0.5
8,T000306,5
8,T000305,5
8,T000304,0.5
8,T000303,0.5
8,T000302,0.5
8,T000299,0.5
8,T000298,0.5
8,T000297,5
8,T000293,0.5
8,T000292,5
8,T000291,5
8,T000289,0.5
8,T000288,5
8,T000286,0.5
8,T000212,0.5
8,T001617,5
8,T001615,5
8,T001614,0.5
8,T001613,0.5
8,T001612,0.5
8,T001611,0.5
8,T001610,5
8,T001609,0.5
8,T001608,0.5
8,T001607,0.5
8,T001606,0.5
8,T001604,0.5
8,T001603,5
8,T001602,5
8,T001601,0.5
8,T001600,0.5
8,T000287,0.5
8,T001921,0.5
8,T001920,5
8,T001919,0.5
8,T001918,0.5
8,T001917,5
8,T001916,5
8,T001915,0.5
8,T001914,0.5
8,T001911,5
8,T001942,0.5
8,T001947,0.5
8,T001943,5
8,T001945,0.5
8,T001946,5
8,T001956,5
9,T000036,5
9,T000040,5
9,T000041,5
9,T000042,0.5
9,T000043,0.5
9,T000044,0.5
9,T000045,5
9,T000046,0.5
9,T000047,5
9,T000048,5
9,T000050,5
9,T000052,0.5
9,T000053,5
9,T000057,0.5
9,T000058,0.5
9,T000059,5
9,T000060,0.5
9,T000061,5
9,T000062,5
9,T000078,0.5
9,T000077,5
9,T000076,5
9,T000075,5
9,T000074,0.5
9,T000073,0.5
9,T000072,0.5
9,T000071,5
9,T000070,0.5
9,T000069,0.5
9,T000063,5
9,T000064,5
9,T000065,0.5
9,T000066,0.5
9,T000067,0.5
9,T000068,5
9,T001631,5
9,T001630,5
9,T001629,5
9,T001628,5
9,T000312,0.5
9,T000310,0.5
9,T000308,0.5
9,T000307,0.5
9,T000306,0.5
9,T000305,0.5
9,T000304,0.5
9,T000303,0.5
9,T000302,0.5
9,T000301,5
9,T000299,5
9,T000298,5
9,T000295,0.5
9,T000294,5
9,T000293,5
9,T000292,0.5
9,T000291,0.5
9,T000288,5
9,T000286,5
9,T000212,0.5
9,T001617,0.5
9,T001616,5
9,T001615,5
9,T001613,5
9,T001611,0.5
9,T001610,0.5
9,T001609,5
9,T001608,0.5
9,T001607,5
9,T001606,5
9,T001604,5
9,T001603,5
9,T001602,0.5
9,T001601,0.5
9,T001600,0.5
9,T000287,5
9,T001921,0.5
9,T001920,5
9,T001919,0.5
9,T001918,5
9,T001917,5
9,T001916,5
9,T001915,0.5
9,T001914,0.5
9,T001911,5
9,T001942,0.5
9,T001947,0.5
9,T001943,0.5
9,T001944,5
9,T001945,5
9,T001946,5
10,T000036,0.5
10,T000040,5
10,T000041,0.5
10,T000042,0.5
10,T000043,5
10,T000044,5
10,T000046,5
10,T000047,5
10,T000048,0.5
10,T000049,0.5
10,T000050,5
10,T000051,5
10,T000052,0.5
10,T000054,5
10,T000055,5
10,T000056,5
10,T000057,0.5
10,T000058,0.5
10,T000059,0.5
10,T000060,0.5
10,T000062,0.5
10,T000078,0.5
10,T000077,0.5
10,T000076,0.5
10,T000075,5
10,T000074,0.5
10,T000073,5
10,T000072,5
10,T000071,0.5
10,T000070,5
10,T000063,5
10,T000064,0.5
10,T000065,0.5
10,T000066,5
10,T000067,0.5
10,T001631,0.5
10,T001630,0.5
10,T001629,0.5
10,T001628,0.5
10,T000311,0.5
10,T000310,0.5
10,T000309,0.5
10,T000308,0.5
10,T000307,0.5
10,T000306,0.5
10,T000305,0.5
10,T000304,0.5
10,T000302,0.5
10,T000301,0.5
10,T000299,0.5
10,T000298,0.5
10,T000295,0.5
10,T000294,0.5
10,T000293,0.5
10,T000292,0.5
10,T000291,0.5
10,T000289,0.5
10,T000288,0.5
10,T000286,0.5
10,T000212,0.5
10,T001617,0.5
10,T001616,5
10,T001614,5
10,T001613,0.5
10,T001612,0.5
10,T001611,0.5
10,T001610,0.5
10,T001608,0.5
10,T001607,0.5
10,T001606,5
10,T001604,0.5
10,T001603,0.5
10,T001602,0.5
10,T001601,5
10,T001600,0.5
10,T001921,5
10,T001920,5
10,T001919,5
10,T001918,0.5
10,T001917,0.5
10,T001916,5
10,T001914,0.5
10,T001911,0.5
10,T001942,0.5
10,T001947,0.5
10,T001943,5
10,T001944,0.5
10,T001945,5
10,T001946,0.5
10,T001956,0.5
11,T000040,0.5
11,T000041,0.5
11,T000042,0.5
11,T000043,0.5
11,T000044,5
11,T000045,5
11,T000047,0.5
11,T000048,0.5
11,T000049,5
11,T000050,5
11,T000051,5
11,T000052,0.5
11,T000053,0.5
11,T000055,0.5
11,T000056,5
11,T000057,0.5
11,T000058,0.5
11,T000059,0.5
11,T000060,0.5
11,T000061,5
11,T000062,5
11,T000078,0.5
11,T000076,0.5
11,T000075,5
11,T000074,5
11,T000073,0.5
11,T000072,0.5
11,T000071,0.5
11,T000070,5
11,T000063,0.5
11,T000064,5
11,T000065,5
11,T000066,0.5
11,T000067,5
11,T000068,0.5
11,T001631,0.5
11,T001630,0.5
11,T001629,0.5
11,T000312,5
11,T000311,5
11,T000310,0.5
11,T000309,0.5
11,T000307,0.5
11,T000306,0.5
11,T000305,0.5
11,T000304,0.5
11,T000303,0.5
11,T000302,0.5
11,T000299,5
11,T000298,5
11,T000297,5
11,T000295,0.5
11,T000294,0.5
11,T000293,0.5
11,T000292,0.5
11,T000291,5
11,T000289,5
11,T000286,5
11,T000212,0.5
11,T001617,0.5
11,T001616,5
11,T001615,5
11,T001614,5
11,T001613,0.5
11,T001612,0.5
11,T001611,0.5
11,T001609,5
11,T001608,5
11,T001607,0.5
11,T001606,0.5
11,T001604,5
11,T001603,0.5
11,T001602,0.5
11,T001601,0.5
11,T000287,0.5
11,T001921,5
11,T001920,5
11,T001919,5
11,T001918,0.5
11,T001917,0.5
11,T001916,0.5
11,T001915,0.5
11,T001911,0.5
11,T001942,0.5
11,T001947,0.5
11,T001943,0.5
11,T001944,5
11,T001945,0.5
11,T001946,0.5
11,T001956,0.5
12,T000036,5
12,T000040,0.5
12,T000041,0.5
12,T000042,0.5
12,T000043,5
12,T000044,0.5
12,T000046,0.5
12,T000047,5
12,T000049,5
12,T000050,5
12,T000051,0.5
12,T000052,5
12,T000053,5
12,T000054,5
12,T000055,0.5
12,T000057,0.5
12,T000058,0.5
12,T000059,0.5
12,T000060,5
12,T000061,5
12,T000062,5
12,T000078,0.5
12,T000077,0.5
12,T000076,5
12,T000074,0.5
12,T000073,5
12,T000072,0.5
12,T000071,5
12,T000070,0.5
12,T000069,5
12,T000064,5
12,T000065,0.5
12,T000066,0.5
12,T000067,5
12,T000068,0.5
12,T001630,0.5
12,T001629,0.5
12,T001628,0.5
12,T000312,0.5
12,T000311,0.5
12,T000310,5
12,T000308,0.5
12,T000307,0.5
12,T000306,0.5
12,T000305,0.5
12,T000304,0.5
12,T000303,5
12,T000301,0.5
12,T000299,5
12,T000298,0.5
12,T000297,0.5
12,T000295,0.5
12,T000294,5
12,T000293,5
12,T000292,0.5
12,T000289,0.5
12,T000288,5
12,T000286,5
12,T000212,0.5
12,T001617,0.5
12,T001615,5
12,T001614,0.5
12,T001613,5
12,T001612,5
12,T001611,5
12,T001610,5
12,T001609,0.5
12,T001608,0.5
12,T001607,0.5
12,T001606,0.5
12,T001604,0.5
12,T001603,0.5
12,T001602,0.5
12,T001601,5
12,T001600,0.5
12,T000287,0.5
12,T001921,5
12,T001920,5
12,T001919,0.5
12,T001917,5
12,T001916,0.5
12,T001915,5
12,T001914,0.5
12,T001911,0.5
12,T001942,0.5
12,T001943,0.5
12,T001944,0.5
12,T001945,5
12,T001946,0.5
12,T001956,5
13,T000036,0.5
13,T000040,0.5
13,T000041,0.5
13,T000042,0.5
13,T000044,5
13,T000045,5
13,T000046,0.5
13,T000047,5
13,T000048,5
13,T000049,5
13,T000050,5
13,T000051,0.5
13,T000053,0.5
13,T000054,0.5
13,T000055,0.5
13,T000056,5
13,T000057,5
13,T000058,5
13,T000060,5
13,T000061,5
13,T000062,5
13,T000078,5
13,T000077,5
13,T000076,5
13,T000075,5
13,T000074,5
13,T000073,5
13,T000072,5
13,T000070,5
13,T000069,5
13,T000063,5
13,T000064,5
13,T000065,5
13,T000066,5
13,T000067,5
13,T000068,0.5
13,T001631,5
13,T001630,5
13,T001629,0.5
13,T000312,5
13,T000311,5
13,T000310,5
13,T000309,5
13,T000308,5
13,T000306,0.5
13,T000305,5
13,T000304,5
13,T000303,5
13,T000302,5
13,T000301,5
13,T000298,0.5
13,T000297,0.5
13,T000295,0.5
13,T000294,5
13,T000293,5
13,T000292,5
13,T000289,5
13,T000288,5
13,T000286,5
13,T000212,0.5
13,T001617,5
13,T001616,5
13,T001615,5
13,T001613,5
13,T001612,0.5
13,T001611,0.5
13,T001610,5
13,T001609,0.5
13,T001608,0.5
13,T001607,5
13,T001606,0.5
13,T001603,5
13,T001602,0.5
13,T001601,0.5
13,T001600,0.5
13,T000287,0.5
13,T001920,0.5
13,T001919,0.5
13,T001918,0.5
13,T001917,0.5
13,T001916,0.5
13,T001915,0.5
13,T001914,0.5
13,T001942,0.5
13,T001947,0.5
13,T001943,5
13,T001944,0.5
13,T001945,5
13,T001946,5
13,T001956,0.5
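
dataset_test.csv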
userid,contentsid,rating
1,T000043,5
1,T000055,0.5
1,T000072,0.5
1,T000064,5
1,T001630,0.5
1,T000308,0.5
1,T000293,0.5
1,T001616,0.5
1,T001613,5
1,T001601,0.5
1,T001919,0.5
1,T001946,5
2,T000046,5
2,T000051,0.5
2,T000074,0.5
2,T000308,0.5
2,T000307,5
2,T000299,0.5
2,T000291,0.5
2,T001613,5
2,T001607,0.5
2,T001920,5
2,T001916,0.5
2,T001943,0.5
3,T000036,5
3,T000049,0.5
3,T000053,0.5
3,T000061,5
3,T000073,0.5
3,T001628,5
3,T000302,5
3,T000212,0.5
3,T001616,0.5
3,T001606,0.5
3,T001920,0.5
3,T001915,0.5
4,T001947,0.5
4,T001921,0.5
4,T001617,0.5
4,T001606,0.5
4,T000040,0.5
4,T000045,0.5
4,T000060,5
4,T000077,0.5
4,T000068,0.5
4,T000302,0.5
4,T000293,0.5
4,T000288,0.5
5,T001956,5
5,T001915,0.5
5,T001611,5
5,T001604,5
5,T000046,0.5
5,T000056,5
5,T000073,0.5
5,T000065,0.5
5,T001630,0.5
5,T000309,0.5
5,T000299,5
5,T000294,0.5
6,T001943,5
6,T001911,0.5
6,T000036,0.5
6,T000050,5
6,T000056,0.5
6,T000059,5
6,T000074,5
6,T000072,0.5
6,T000071,0.5
6,T000293,0.5
6,T000292,5
6,T000212,0.5
7,T000053,5
7,T000054,5
7,T000060,0.5
7,T000078,0.5
7,T000071,0.5
7,T000298,0.5
7,T000288,0.5
7,T001608,0.5
7,T001606,5
7,T001917,0.5
7,T001915,0.5
7,T001914,5
8,T000040,0.5
8,T000044,0.5
8,T000053,0.5
8,T000059,5
8,T000061,5
8,T000072,0.5
8,T001631,0.5
8,T000301,0.5
8,T000295,5
8,T000294,5
8,T001616,5
8,T001944,5
9,T000049,5
9,T000051,0.5
9,T000054,5
9,T000055,5
9,T000056,5
9,T000311,5
9,T000309,5
9,T000297,5
9,T000289,5
9,T001614,0.5
9,T001612,0.5
9,T001956,0.5
10,T000045,0.5
10,T000053,0.5
10,T000061,5
10,T000069,0.5
10,T000068,0.5
10,T000312,0.5
10,T000303,0.5
10,T000297,0.5
10,T000287,5
10,T001615,5
10,T001609,5
10,T001915,5
11,T000036,5
11,T000046,0.5
11,T000054,5
11,T000077,0.5
11,T000069,0.5
11,T001628,5
11,T000308,5
11,T000301,0.5
11,T000288,5
11,T001610,0.5
11,T001600,5
11,T001914,5
12,T000045,0.5
12,T000048,0.5
12,T000056,5
12,T000075,5
12,T000063,0.5
12,T001631,5
12,T000309,0.5
12,T000302,0.5
12,T000291,5
12,T001616,0.5
12,T001918,5
12,T001947,5
13,T000043,5
13,T000052,5
13,T000059,5
13,T000071,5
13,T001628,0.5
13,T000307,5
13,T000299,0.5
13,T000291,5
13,T001614,0.5
13,T001604,5
13,T001921,0.5
13,T001911,0.5