
ZIP | Time series anomaly detection code

Uploader: weixin_44245188 | Size: 1.72MB | Points required: 1

Resource file list:

KPIAnmalyDetect.zip contains approximately 91 files
  1. KPIAnmalyDetect/
  2. KPIAnmalyDetect/.git/
  3. KPIAnmalyDetect/.git/COMMIT_EDITMSG 13B
  4. KPIAnmalyDetect/.git/config 250B
  5. KPIAnmalyDetect/.git/description 73B
  6. KPIAnmalyDetect/.git/HEAD 21B
  7. KPIAnmalyDetect/.git/hooks/
  8. KPIAnmalyDetect/.git/hooks/applypatch-msg.sample 478B
  9. KPIAnmalyDetect/.git/hooks/commit-msg.sample 896B
  10. KPIAnmalyDetect/.git/hooks/fsmonitor-watchman.sample 4.62KB
  11. KPIAnmalyDetect/.git/hooks/post-update.sample 189B
  12. KPIAnmalyDetect/.git/hooks/pre-applypatch.sample 424B
  13. KPIAnmalyDetect/.git/hooks/pre-commit.sample 1.6KB
  14. KPIAnmalyDetect/.git/hooks/pre-merge-commit.sample 416B
  15. KPIAnmalyDetect/.git/hooks/pre-push.sample 1.34KB
  16. KPIAnmalyDetect/.git/hooks/pre-rebase.sample 4.78KB
  17. KPIAnmalyDetect/.git/hooks/pre-receive.sample 544B
  18. KPIAnmalyDetect/.git/hooks/prepare-commit-msg.sample 1.46KB
  19. KPIAnmalyDetect/.git/hooks/push-to-checkout.sample 2.72KB
  20. KPIAnmalyDetect/.git/hooks/sendemail-validate.sample 2.25KB
  21. KPIAnmalyDetect/.git/hooks/update.sample 3.56KB
  22. KPIAnmalyDetect/.git/index 1.4KB
  23. KPIAnmalyDetect/.git/info/
  24. KPIAnmalyDetect/.git/info/exclude 240B
  25. KPIAnmalyDetect/.git/logs/
  26. KPIAnmalyDetect/.git/logs/HEAD 1.22KB
  27. KPIAnmalyDetect/.git/logs/refs/
  28. KPIAnmalyDetect/.git/logs/refs/heads/
  29. KPIAnmalyDetect/.git/logs/refs/heads/main 704B
  30. KPIAnmalyDetect/.git/objects/
  31. KPIAnmalyDetect/.git/objects/17/
  32. KPIAnmalyDetect/.git/objects/17/b5e7d565beafa42a2cd0953dbc1a78522fc162 1.7KB
  33. KPIAnmalyDetect/.git/objects/1b/
  34. KPIAnmalyDetect/.git/objects/1b/c18208157ff47e142215efc1c8453a3a3ee420 861B
  35. KPIAnmalyDetect/.git/objects/1d/
  36. KPIAnmalyDetect/.git/objects/1d/f235d02f5c89fde19bea84560e50d2c1aba2ed 568B
  37. KPIAnmalyDetect/.git/objects/29/
  38. KPIAnmalyDetect/.git/objects/29/84a226111e9062d53a20e6efd9fee7085ad056 125.86KB
  39. KPIAnmalyDetect/.git/objects/2c/
  40. KPIAnmalyDetect/.git/objects/2c/82a03e6eb026710532ce3d00a32183fe72fb61 160.67KB
  41. KPIAnmalyDetect/.git/objects/3d/
  42. KPIAnmalyDetect/.git/objects/3d/ea055931304295907b3f1f4936d3f6700c23b2 45KB
  43. KPIAnmalyDetect/.git/objects/3e/
  44. KPIAnmalyDetect/.git/objects/3e/6cbccba817f8567d1be18a227dcd15d6afce35 519B
  45. KPIAnmalyDetect/.git/objects/47/
  46. KPIAnmalyDetect/.git/objects/47/b626174f346b79c5e7e7e22fe9c61249f7d800 123B
  47. KPIAnmalyDetect/.git/objects/78/
  48. KPIAnmalyDetect/.git/objects/78/29311f0dc4c9b57d380a8f353196f6b7f43f96 1.21KB
  49. KPIAnmalyDetect/.git/objects/84/
  50. KPIAnmalyDetect/.git/objects/84/cbbefd43a1aed9c28c95fe9e6e88deb7864230 125.94KB
  51. KPIAnmalyDetect/.git/objects/88/
  52. KPIAnmalyDetect/.git/objects/88/ed2cfddd29cc16227680426828e106942e3ed1 1.82KB
  53. KPIAnmalyDetect/.git/objects/ad/
  54. KPIAnmalyDetect/.git/objects/ad/3f8692d0f4dd79acc3148aff0f0420405063f8 242.75KB
  55. KPIAnmalyDetect/.git/objects/bc/
  56. KPIAnmalyDetect/.git/objects/bc/e24835b88393abd6512775d9ccc9acf790b0e9 909B
  57. KPIAnmalyDetect/.git/objects/c7/
  58. KPIAnmalyDetect/.git/objects/c7/45fcee35a90c0de27e1db55859d9d89fc32d8e 620B
  59. KPIAnmalyDetect/.git/objects/d8/
  60. KPIAnmalyDetect/.git/objects/d8/141f7fd4aabbde9f48c957fcb5fbe2a28b289f 503B
  61. KPIAnmalyDetect/.git/objects/db/
  62. KPIAnmalyDetect/.git/objects/db/730e30b58ddcda997728b68a48efd528018ee0 742B
  63. KPIAnmalyDetect/.git/objects/e0/
  64. KPIAnmalyDetect/.git/objects/e0/119ddd1d1903a584450a2e0ddfb2d89987a8c3 118.69KB
  65. KPIAnmalyDetect/.git/objects/e3/
  66. KPIAnmalyDetect/.git/objects/e3/c126db3fd114f76b700645fbc6267e8b826347 1.17KB
  67. KPIAnmalyDetect/.git/objects/e4/
  68. KPIAnmalyDetect/.git/objects/e4/0ca1fb03d838e55163c783864ae9058d424087 116.76KB
  69. KPIAnmalyDetect/.git/objects/info/
  70. KPIAnmalyDetect/.git/objects/pack/
  71. KPIAnmalyDetect/.git/refs/
  72. KPIAnmalyDetect/.git/refs/heads/
  73. KPIAnmalyDetect/.git/refs/heads/main 41B
  74. KPIAnmalyDetect/.git/refs/tags/
  75. KPIAnmalyDetect/box.py 2.64KB
  76. KPIAnmalyDetect/data.csv 425.83KB
  77. KPIAnmalyDetect/data_balance.csv 569.81KB
  78. KPIAnmalyDetect/data_box.csv 543.96KB
  79. KPIAnmalyDetect/data_explore.ipynb 236.52KB
  80. KPIAnmalyDetect/data_k_means.csv 462.26KB
  81. KPIAnmalyDetect/data_normal.csv 543.96KB
  82. KPIAnmalyDetect/demo1_svm.py 1.15KB
  83. KPIAnmalyDetect/demo2_lstm.py 1.83KB
  84. KPIAnmalyDetect/demo2_lstm_.py 1.91KB
  85. KPIAnmalyDetect/demo3_k_means_.py 3.56KB
  86. KPIAnmalyDetect/demo4_iForest.py 751B
  87. KPIAnmalyDetect/demo5_anencoder.py 5.26KB
  88. KPIAnmalyDetect/demo6_one-class-SVM.py 1.42KB
  89. KPIAnmalyDetect/k_means_result.csv 336.78KB
  90. KPIAnmalyDetect/make_balance.py 737B
  91. KPIAnmalyDetect/normal.py 2.76KB

Resource introduction:

Time series anomaly detection code. The script below loads data.csv, trains a small autoencoder on the normalized training split, and flags points whose reconstruction error exceeds a fixed threshold:
import os
import pandas as pd
import numpy as np
import tensorflow as tf
from sklearn import preprocessing
import seaborn as sns
sns.set(color_codes=True)
import matplotlib.pyplot as plt


def data_load():
    # Load the data set, indexed by its timestamp column
    merged_data = pd.read_csv("data.csv", index_col='_time')
    print(merged_data.head())
    # merged_data.plot()

    # Data pre-processing: split into training and test sets (70% / 30%)
    merged_data = merged_data.sort_index(ascending=True)
    dataset_train = merged_data.iloc[:int(len(merged_data) * 0.7)]
    dataset_test = merged_data.iloc[int(len(merged_data) * 0.7):]
    # dataset_train.plot(figsize=(12, 6))

    # Normalize the data: fit the scaler on the training set only, then apply it to both sets
    scaler = preprocessing.MinMaxScaler()
    X_train = pd.DataFrame(scaler.fit_transform(dataset_train),
                           columns=dataset_train.columns,
                           index=dataset_train.index)
    # Randomly shuffle the training data (sample() returns a copy, so assign it back)
    X_train = X_train.sample(frac=1)
    X_test = pd.DataFrame(scaler.transform(dataset_test),
                          columns=dataset_test.columns,
                          index=dataset_test.index)
    return X_train, X_test


# Build the autoencoder model
def AutoEncoder_build(X_train, act_func):
    tf.random.set_seed(10)
    # act_func = 'elu'
    # Sequential() is a container that describes the network structure, applying the layers in order
    model = tf.keras.Sequential()
    # First hidden layer, connected to the input vector X
    model.add(tf.keras.layers.Dense(10, activation=act_func,                           # activation function
                                    kernel_initializer='glorot_uniform',               # weight initialization
                                    kernel_regularizer=tf.keras.regularizers.l2(0.0),  # L2 regularization to limit overfitting
                                    input_shape=(X_train.shape[1],)))
    model.add(tf.keras.layers.Dense(2, activation=act_func, kernel_initializer='glorot_uniform'))
    model.add(tf.keras.layers.Dense(10, activation=act_func, kernel_initializer='glorot_uniform'))
    model.add(tf.keras.layers.Dense(X_train.shape[1], kernel_initializer='glorot_uniform'))
    model.compile(loss='mse', optimizer='adam')  # configure loss and optimizer
    model.summary()
    tf.keras.utils.plot_model(model, show_shapes=True)
    return model


def AutoEncoder_main(model, Epochs, BATCH_SIZE, validation_split):
    # Train the model for the given number of epochs and batch size
    factor = 0.5
    X_train, X_test = data_load()
    # Optional Gaussian noise for a denoising variant (computed but not used below)
    X_train_noise = X_train + factor * np.random.normal(0, 1, size=X_train.shape)
    history = model.fit(np.array(X_train), np.array(X_train),
                        batch_size=BATCH_SIZE,
                        epochs=Epochs,
                        validation_split=validation_split,  # fraction of the training data held out for validation
                        # shuffle=True,
                        verbose=1)
    return history


def plot_AE_history(history):
    plt.plot(history.history['loss'], 'b', label='Training loss')
    plt.plot(history.history['val_loss'], 'r', label='Validation loss')
    plt.legend(loc='upper right')
    plt.xlabel('Epochs')
    plt.ylabel('Loss, [mse]')
    plt.ylim([0, .1])
    plt.show()
    plt.close()


X_train, X_test = data_load()
model = AutoEncoder_build(X_train, act_func='relu')
history = AutoEncoder_main(model=model, Epochs=100, BATCH_SIZE=32, validation_split=0.5)
plot_AE_history(history)

# Distribution of the reconstruction error (mean absolute error) on the training set
X_pred = model.predict(np.array(X_train))
X_pred = pd.DataFrame(X_pred, columns=X_train.columns)
X_pred.index = X_train.index
scored = pd.DataFrame(index=X_train.index)
scored['Loss_mae'] = np.mean(np.abs(X_pred - X_train), axis=1)
plt.figure()
sns.distplot(scored['Loss_mae'], bins=10, kde=True, color='blue')
plt.xlim([0.0, .5])
plt.show()
plt.close()

# Score the test set and flag points whose reconstruction error exceeds a fixed threshold
X_pred = model.predict(np.array(X_test))
X_pred = pd.DataFrame(X_pred, columns=X_test.columns)
X_pred.index = X_test.index
threshold = 0.3
scored = pd.DataFrame(index=X_test.index)
scored['Loss_mae'] = np.mean(np.abs(X_pred - X_test), axis=1)
scored['Threshold'] = threshold
scored['Anomaly'] = scored['Loss_mae'] > scored['Threshold']
scored.head()

# Score the training set the same way and plot both segments together
X_pred_train = model.predict(np.array(X_train))
X_pred_train = pd.DataFrame(X_pred_train, columns=X_train.columns)
X_pred_train.index = X_train.index
scored_train = pd.DataFrame(index=X_train.index)
scored_train['Loss_mae'] = np.mean(np.abs(X_pred_train - X_train), axis=1)
scored_train['Threshold'] = threshold
scored_train['Anomaly'] = scored_train['Loss_mae'] > scored_train['Threshold']
scored = pd.concat([scored_train, scored]).sort_index()  # sort by time so the combined series plots chronologically
scored.plot(logy=True, figsize=(10, 6), ylim=[1e-2, 1e2], color=['blue', 'red'])
plt.show()
plt.close()
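The script above hard-codes threshold = 0.3. A common refinement is to derive the threshold from the distribution of reconstruction errors on the training data instead, for example a high percentile. The minimal sketch below reuses model, X_train and X_test from the script above; the 99th percentile is an illustrative choice, not a value taken from the archive.

# Reconstruction error (MAE per sample) on the normalized training set
train_errors = np.mean(np.abs(model.predict(np.array(X_train)) - np.array(X_train)), axis=1)

# Illustrative choice: flag anything above the 99th percentile of the training error
threshold = np.percentile(train_errors, 99)

# Apply the same rule to the test set
test_errors = np.mean(np.abs(model.predict(np.array(X_test)) - np.array(X_test)), axis=1)
anomalies = X_test.index[test_errors > threshold]
print(f"threshold={threshold:.4f}, anomalous points flagged: {len(anomalies)}")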
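The archive also lists demo4_iForest.py and demo6_one-class-SVM.py, whose contents are not reproduced on this page. Purely as an illustration of the Isolation Forest approach the former presumably implements, here is a minimal sketch assuming scikit-learn and the same data.csv layout as above; the contamination value is an assumption, not taken from the file.

import pandas as pd
from sklearn import preprocessing
from sklearn.ensemble import IsolationForest

# Load and normalize data.csv the same way as the autoencoder script above
data = pd.read_csv("data.csv", index_col='_time').sort_index(ascending=True)
X = preprocessing.MinMaxScaler().fit_transform(data)

# contamination is the assumed fraction of anomalies in the data; 0.01 is illustrative only
clf = IsolationForest(n_estimators=100, contamination=0.01, random_state=10)
labels = clf.fit_predict(X)           # -1 = anomaly, 1 = normal
scores = clf.decision_function(X)     # lower scores are more anomalous

result = pd.DataFrame({'score': scores, 'anomaly': labels == -1}, index=data.index)
print(result['anomaly'].sum(), "points flagged as anomalous")

Unlike the autoencoder, this needs no training loop or loss plots; the contamination parameter roughly fixes the fraction of points that end up flagged.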