Image Recognition with TensorFlow Lite

[Figure 1: TensorFlow Lite image recognition demo]

Overview

1. Image recognition with TensorFlow Lite
2. Structure of the app project
3. Source code of the CameraActivity camera class
4. Continuously classifying whatever the device's rear camera sees
5. Build log

1. Image recognition with TensorFlow Lite

2. Structure of the app project

3. Source code of the CameraActivity camera class
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tensorflow.lite.examples.classification;

import android.Manifest;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Trace;
import android.util.Size;
import android.view.Surface;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AppCompatActivity;
import com.google.android.material.bottomsheet.BottomSheetBehavior;
import java.nio.ByteBuffer;
import java.util.List;
import org.tensorflow.lite.examples.classification.env.ImageUtils;
import org.tensorflow.lite.examples.classification.env.Logger;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Model;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Recognition;

public abstract class CameraActivity extends AppCompatActivity
    implements OnImageAvailableListener,
        Camera.PreviewCallback,
        View.OnClickListener,
        AdapterView.OnItemSelectedListener {
  private static final Logger LOGGER = new Logger();

  private static final int PERMISSIONS_REQUEST = 1;
  private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;

  protected int previewWidth = 0;
  protected int previewHeight = 0;
  private Handler handler;
  private HandlerThread handlerThread;
  private boolean useCamera2API;
  private boolean isProcessingFrame = false;
  private byte[][] yuvBytes = new byte[3][];
  private int[] rgbBytes = null;
  private int yRowStride;
  private Runnable postInferenceCallback;
  private Runnable imageConverter;
  private LinearLayout bottomSheetLayout;
  private LinearLayout gestureLayout;
  private BottomSheetBehavior<LinearLayout> sheetBehavior;
  protected TextView recognitionTextView,
      recognition1TextView,
      recognition2TextView,
      recognitionValueTextView,
      recognition1ValueTextView,
      recognition2ValueTextView;
  protected TextView frameValueTextView,
      cropValueTextView,
      cameraResolutionTextView,
      rotationTextView,
      inferenceTimeTextView;
  protected ImageView bottomSheetArrowImageView;
  private ImageView plusImageView, minusImageView;
  private Spinner modelSpinner;
  private Spinner deviceSpinner;
  private TextView threadsTextView;

  private Model model = Model.QUANTIZED_EFFICIENTNET;
  private Device device = Device.CPU;
  private int numThreads = -1;

  @Override
  protected void onCreate(final Bundle savedInstanceState) {
    LOGGER.d("onCreate " + this);
    super.onCreate(null);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.tfe_ic_activity_camera);

    if (hasPermission()) {
      setFragment();
    } else {
      requestPermission();
    }

    threadsTextView = findViewById(R.id.threads);
    plusImageView = findViewById(R.id.plus);
    minusImageView = findViewById(R.id.minus);
    modelSpinner = findViewById(R.id.model_spinner);
    deviceSpinner = findViewById(R.id.device_spinner);
    bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
    gestureLayout = findViewById(R.id.gesture_layout);
    sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
    bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);

    ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
    vto.addOnGlobalLayoutListener(
        new ViewTreeObserver.OnGlobalLayoutListener() {
          @Override
          public void onGlobalLayout() {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
              gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
            } else {
              gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
            }
            //    int width = bottomSheetLayout.getMeasuredWidth();
            int height = gestureLayout.getMeasuredHeight();
            sheetBehavior.setPeekHeight(height);
          }
        });
    sheetBehavior.setHideable(false);

    sheetBehavior.setBottomSheetCallback(
        new BottomSheetBehavior.BottomSheetCallback() {
          @Override
          public void onStateChanged(@NonNull View bottomSheet, int newState) {
            switch (newState) {
              case BottomSheetBehavior.STATE_HIDDEN:
                break;
              case BottomSheetBehavior.STATE_EXPANDED:
                {
                  bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
                }
                break;
              case BottomSheetBehavior.STATE_COLLAPSED:
                {
                  bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
                }
                break;
              case BottomSheetBehavior.STATE_DRAGGING:
                break;
              case BottomSheetBehavior.STATE_SETTLING:
                bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
                break;
            }
          }

          @Override
          public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
        });

    recognitionTextView = findViewById(R.id.detected_item);
    recognitionValueTextView = findViewById(R.id.detected_item_value);
    recognition1TextView = findViewById(R.id.detected_item1);
    recognition1ValueTextView = findViewById(R.id.detected_item1_value);
    recognition2TextView = findViewById(R.id.detected_item2);
    recognition2ValueTextView = findViewById(R.id.detected_item2_value);

    frameValueTextView = findViewById(R.id.frame_info);
    cropValueTextView = findViewById(R.id.crop_info);
    cameraResolutionTextView = findViewById(R.id.view_info);
    rotationTextView = findViewById(R.id.rotation_info);
    inferenceTimeTextView = findViewById(R.id.inference_info);

    modelSpinner.setOnItemSelectedListener(this);
    deviceSpinner.setOnItemSelectedListener(this);

    plusImageView.setOnClickListener(this);
    minusImageView.setOnClickListener(this);

    model = Model.valueOf(modelSpinner.getSelectedItem().toString().toUpperCase());
    device = Device.valueOf(deviceSpinner.getSelectedItem().toString());
    numThreads = Integer.parseInt(threadsTextView.getText().toString().trim());
  }

  protected int[] getRgbBytes() {
    imageConverter.run();
    return rgbBytes;
  }

  protected int getLuminanceStride() {
    return yRowStride;
  }

  protected byte[] getLuminance() {
    return yuvBytes[0];
  }

  /** Callback for android.hardware.Camera API */
  @Override
  public void onPreviewFrame(final byte[] bytes, final Camera camera) {
    if (isProcessingFrame) {
      LOGGER.w("Dropping frame!");
      return;
    }

    try {
      // Initialize the storage bitmaps once when the resolution is known.
      if (rgbBytes == null) {
        Camera.Size previewSize = camera.getParameters().getPreviewSize();
        previewHeight = previewSize.height;
        previewWidth = previewSize.width;
        rgbBytes = new int[previewWidth * previewHeight];
        onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
      }
    } catch (final Exception e) {
      LOGGER.e(e, "Exception!");
      return;
    }

    isProcessingFrame = true;
    yuvBytes[0] = bytes;
    yRowStride = previewWidth;
    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
          }
        };

    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            camera.addCallbackBuffer(bytes);
            isProcessingFrame = false;
          }
        };
    processImage();
  }

  /** Callback for Camera2 API */
  @Override
  public void onImageAvailable(final ImageReader reader) {
    // We need wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
      return;
    }
    if (rgbBytes == null) {
      rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
      final Image image = reader.acquireLatestImage();

      if (image == null) {
        return;
      }

      if (isProcessingFrame) {
        image.close();
        return;
      }
      isProcessingFrame = true;
      Trace.beginSection("imageAvailable");
      final Plane[] planes = image.getPlanes();
      fillBytes(planes, yuvBytes);
      yRowStride = planes[0].getRowStride();
      final int uvRowStride = planes[1].getRowStride();
      final int uvPixelStride = planes[1].getPixelStride();

      imageConverter =
          new Runnable() {
            @Override
            public void run() {
              ImageUtils.convertYUV420ToARGB8888(
                  yuvBytes[0],
                  yuvBytes[1],
                  yuvBytes[2],
                  previewWidth,
                  previewHeight,
                  yRowStride,
                  uvRowStride,
                  uvPixelStride,
                  rgbBytes);
            }
          };

      postInferenceCallback =
          new Runnable() {
            @Override
            public void run() {
              image.close();
              isProcessingFrame = false;
            }
          };

      processImage();
    } catch (final Exception e) {
      LOGGER.e(e, "Exception!");
      Trace.endSection();
      return;
    }
    Trace.endSection();
  }

  @Override
  public synchronized void onStart() {
    LOGGER.d("onStart " + this);
    super.onStart();
  }

  @Override
  public synchronized void onResume() {
    LOGGER.d("onResume " + this);
    super.onResume();

    handlerThread = new HandlerThread("inference");
    handlerThread.start();
    handler = new Handler(handlerThread.getLooper());
  }

  @Override
  public synchronized void onPause() {
    LOGGER.d("onPause " + this);

    handlerThread.quitSafely();
    try {
      handlerThread.join();
      handlerThread = null;
      handler = null;
    } catch (final InterruptedException e) {
      LOGGER.e(e, "Exception!");
    }

    super.onPause();
  }

  @Override
  public synchronized void onStop() {
    LOGGER.d("onStop " + this);
    super.onStop();
  }

  @Override
  public synchronized void onDestroy() {
    LOGGER.d("onDestroy " + this);
    super.onDestroy();
  }

  protected synchronized void runInBackground(final Runnable r) {
    if (handler != null) {
      handler.post(r);
    }
  }

  @Override
  public void onRequestPermissionsResult(
      final int requestCode, final String[] permissions, final int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == PERMISSIONS_REQUEST) {
      if (allPermissionsGranted(grantResults)) {
        setFragment();
      } else {
        requestPermission();
      }
    }
  }

  private static boolean allPermissionsGranted(final int[] grantResults) {
    for (int result : grantResults) {
      if (result != PackageManager.PERMISSION_GRANTED) {
        return false;
      }
    }
    return true;
  }

  private boolean hasPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
    } else {
      return true;
    }
  }

  private void requestPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
        Toast.makeText(
                CameraActivity.this,
                "Camera permission is required for this demo",
                Toast.LENGTH_LONG)
            .show();
      }
      requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
    }
  }

  // Returns true if the device supports the required hardware level, or better.
  private boolean isHardwareLevelSupported(
      CameraCharacteristics characteristics, int requiredLevel) {
    int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
    if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      return requiredLevel == deviceLevel;
    }
    // deviceLevel is not LEGACY, can use numerical sort
    return requiredLevel <= deviceLevel;
  }

  private String chooseCamera() {
    final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // We don't use a front facing camera in this sample.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }

        final StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }

        // Fallback to camera1 API for internal cameras that don't have full support.
        // This should help with legacy situations where using the camera2 API causes
        // distorted or otherwise broken previews.
        useCamera2API =
            (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
                || isHardwareLevelSupported(
                    characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
        LOGGER.i("Camera API lv2?: %s", useCamera2API);
        return cameraId;
      }
    } catch (CameraAccessException e) {
      LOGGER.e(e, "Not allowed to access camera");
    }

    return null;
  }

  protected void setFragment() {
    String cameraId = chooseCamera();

    Fragment fragment;
    if (useCamera2API) {
      CameraConnectionFragment camera2Fragment =
          CameraConnectionFragment.newInstance(
              new CameraConnectionFragment.ConnectionCallback() {
                @Override
                public void onPreviewSizeChosen(final Size size, final int rotation) {
                  previewHeight = size.getHeight();
                  previewWidth = size.getWidth();
                  CameraActivity.this.onPreviewSizeChosen(size, rotation);
                }
              },
              this,
              getLayoutId(),
              getDesiredPreviewFrameSize());

      camera2Fragment.setCamera(cameraId);
      fragment = camera2Fragment;
    } else {
      fragment =
          new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
    }

    getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
  }

  protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
    // Because of the variable row stride it's not possible to know in
    // advance the actual necessary dimensions of the yuv planes.
    for (int i = 0; i < planes.length; ++i) {
      final ByteBuffer buffer = planes[i].getBuffer();
      if (yuvBytes[i] == null) {
        LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
        yuvBytes[i] = new byte[buffer.capacity()];
      }
      buffer.get(yuvBytes[i]);
    }
  }

  protected void readyForNextImage() {
    if (postInferenceCallback != null) {
      postInferenceCallback.run();
    }
  }

  protected int getScreenOrientation() {
    switch (getWindowManager().getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_270:
        return 270;
      case Surface.ROTATION_180:
        return 180;
      case Surface.ROTATION_90:
        return 90;
      default:
        return 0;
    }
  }

  @UiThread
  protected void showResultsInBottomSheet(List<Recognition> results) {
    if (results != null && results.size() >= 3) {
      Recognition recognition = results.get(0);
      if (recognition != null) {
        if (recognition.getTitle() != null) recognitionTextView.setText(recognition.getTitle());
        if (recognition.getConfidence() != null)
          recognitionValueTextView.setText(
              String.format("%.2f", (100 * recognition.getConfidence())) + "%");
      }

      Recognition recognition1 = results.get(1);
      if (recognition1 != null) {
        if (recognition1.getTitle() != null) recognition1TextView.setText(recognition1.getTitle());
        if (recognition1.getConfidence() != null)
          recognition1ValueTextView.setText(
              String.format("%.2f", (100 * recognition1.getConfidence())) + "%");
      }

      Recognition recognition2 = results.get(2);
      if (recognition2 != null) {
        if (recognition2.getTitle() != null) recognition2TextView.setText(recognition2.getTitle());
        if (recognition2.getConfidence() != null)
          recognition2ValueTextView.setText(
              String.format("%.2f", (100 * recognition2.getConfidence())) + "%");
      }
    }
  }

  protected void showFrameInfo(String frameInfo) {
    frameValueTextView.setText(frameInfo);
  }

  protected void showCropInfo(String cropInfo) {
    cropValueTextView.setText(cropInfo);
  }

  protected void showCameraResolution(String cameraInfo) {
    cameraResolutionTextView.setText(cameraInfo);
  }

  protected void showRotationInfo(String rotation) {
    rotationTextView.setText(rotation);
  }

  protected void showInference(String inferenceTime) {
    inferenceTimeTextView.setText(inferenceTime);
  }

  protected Model getModel() {
    return model;
  }

  private void setModel(Model model) {
    if (this.model != model) {
      LOGGER.d("Updating  model: " + model);
      this.model = model;
      onInferenceConfigurationChanged();
    }
  }

  protected Device getDevice() {
    return device;
  }

  private void setDevice(Device device) {
    if (this.device != device) {
      LOGGER.d("Updating  device: " + device);
      this.device = device;
      final boolean threadsEnabled = device == Device.CPU;
      plusImageView.setEnabled(threadsEnabled);
      minusImageView.setEnabled(threadsEnabled);
      threadsTextView.setText(threadsEnabled ? String.valueOf(numThreads) : "N/A");
      onInferenceConfigurationChanged();
    }
  }

  protected int getNumThreads() {
    return numThreads;
  }

  private void setNumThreads(int numThreads) {
    if (this.numThreads != numThreads) {
      LOGGER.d("Updating  numThreads: " + numThreads);
      this.numThreads = numThreads;
      onInferenceConfigurationChanged();
    }
  }

  protected abstract void processImage();

  protected abstract void onPreviewSizeChosen(final Size size, final int rotation);

  protected abstract int getLayoutId();

  protected abstract Size getDesiredPreviewFrameSize();

  protected abstract void onInferenceConfigurationChanged();

  @Override
  public void onClick(View v) {
    if (v.getId() == R.id.plus) {
      String threads = threadsTextView.getText().toString().trim();
      int numThreads = Integer.parseInt(threads);
      if (numThreads >= 9) return;
      setNumThreads(++numThreads);
      threadsTextView.setText(String.valueOf(numThreads));
    } else if (v.getId() == R.id.minus) {
      String threads = threadsTextView.getText().toString().trim();
      int numThreads = Integer.parseInt(threads);
      if (numThreads == 1) {
        return;
      }
      setNumThreads(--numThreads);
      threadsTextView.setText(String.valueOf(numThreads));
    }
  }

  @Override
  public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
    if (parent == modelSpinner) {
      setModel(Model.valueOf(parent.getItemAtPosition(pos).toString().toUpperCase()));
    } else if (parent == deviceSpinner) {
      setDevice(Device.valueOf(parent.getItemAtPosition(pos).toString()));
    }
  }

  @Override
  public void onNothingSelected(AdapterView<?> parent) {
    // Do nothing.
  }
}
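For reference, the Camera2 path above defers the pixel conversion to ImageUtils.convertYUV420ToARGB8888, invoked lazily through getRgbBytes(). The standalone class below is a minimal sketch of that conversion using the usual fixed-point BT.601 integer formulas; it illustrates the technique and is not copied from the repo's ImageUtils.

// Illustrative sketch of a YUV_420_888 -> ARGB_8888 conversion, using the
// common fixed-point BT.601 formulas. Not the example repo's exact code.
public final class YuvToArgbSketch {
  // 2^18 - 1: channel values are clamped in 18-bit fixed-point space.
  private static final int MAX_CHANNEL_VALUE = 262143;

  private static int yuvToRgb(int y, int u, int v) {
    y = Math.max(y - 16, 0);
    u -= 128;
    v -= 128;
    // Fixed-point conversion coefficients (scaled by 1024).
    int y1192 = 1192 * y;
    int r = y1192 + 1634 * v;
    int g = y1192 - 833 * v - 400 * u;
    int b = y1192 + 2066 * u;
    r = Math.min(Math.max(r, 0), MAX_CHANNEL_VALUE);
    g = Math.min(Math.max(g, 0), MAX_CHANNEL_VALUE);
    b = Math.min(Math.max(b, 0), MAX_CHANNEL_VALUE);
    // Pack into ARGB, shifting each 18-bit channel down to 8 bits.
    return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
  }

  /** Converts the three YUV planes delivered by an ImageReader into one ARGB int per pixel. */
  public static void convertYuv420ToArgb8888(
      byte[] yData, byte[] uData, byte[] vData,
      int width, int height,
      int yRowStride, int uvRowStride, int uvPixelStride,
      int[] out) {
    int outIndex = 0;
    for (int j = 0; j < height; j++) {
      int yOffset = yRowStride * j;
      int uvOffset = uvRowStride * (j >> 1); // chroma rows are subsampled 2x vertically
      for (int i = 0; i < width; i++) {
        int uvIndex = uvOffset + (i >> 1) * uvPixelStride; // ...and 2x horizontally
        out[outIndex++] =
            yuvToRgb(0xff & yData[yOffset + i], 0xff & uData[uvIndex], 0xff & vData[uvIndex]);
      }
    }
  }
}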
4. It uses image classification to continuously classify whatever the device's rear camera sees, as sketched below.
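To make that loop concrete, here is a minimal sketch of a CameraActivity subclass: each converted frame arrives via processImage(), inference belongs on the background handler via runInBackground(), and readyForNextImage() releases the frame. The classifier call is left as a commented assumption because the example's real Classifier wrapper is not reproduced in this post, and the layout id and preview size are illustrative guesses.

package org.tensorflow.lite.examples.classification;

import android.graphics.Bitmap;
import android.util.Size;

/** Minimal illustrative subclass of CameraActivity; not the repo's actual ClassifierActivity. */
public class MinimalClassifierActivity extends CameraActivity {
  private Bitmap rgbFrameBitmap;

  @Override
  protected void onPreviewSizeChosen(final Size size, final int rotation) {
    // Allocate one reusable bitmap as soon as the preview resolution is known.
    rgbFrameBitmap =
        Bitmap.createBitmap(size.getWidth(), size.getHeight(), Bitmap.Config.ARGB_8888);
  }

  @Override
  protected void processImage() {
    // getRgbBytes() runs the pending YUV->ARGB converter and returns the frame's pixels.
    rgbFrameBitmap.setPixels(
        getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
    runInBackground(
        new Runnable() {
          @Override
          public void run() {
            // Hypothetical inference call (the real example wraps TFLite in a
            // Classifier class that is not shown in this post):
            // List<Recognition> results =
            //     classifier.recognizeImage(rgbFrameBitmap, getScreenOrientation());
            // runOnUiThread(() -> showResultsInBottomSheet(results));

            // Release the frame so the camera can deliver the next one.
            readyForNextImage();
          }
        });
  }

  @Override
  protected int getLayoutId() {
    return R.layout.tfe_ic_camera_connection_fragment; // layout name assumed
  }

  @Override
  protected Size getDesiredPreviewFrameSize() {
    return new Size(640, 480); // illustrative preview size
  }

  @Override
  protected void onInferenceConfigurationChanged() {
    // Recreate the classifier here when the model, device, or thread count changes.
  }
}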
5. Build log

Executing tasks: [:app:assembleDebug] in project /Users/apple/PythonProjects/examples-master/lite/examples/image_classification/android

:app:checkDebugClasspath UP-TO-DATE
:app:downloadEfficientNetFloat UP-TO-DATE
:app:unzipModelEfficientNetFloat UP-TO-DATE
:app:downloadEfficientNetQuant UP-TO-DATE
:app:unzipModelEfficientNetQuant UP-TO-DATE
:app:downloadModelFloat UP-TO-DATE
:app:unzipModelFloat
Unzipping build/intermediates/mobilenet_v1_1.0_224.tgz
:app:downloadModelQuant UP-TO-DATE
:app:unzipModelQuant
Unzipping build/intermediates/mobilenet_v1_1.0_224_quant.tgz
:app:cleanUnusedFiles
:app:preBuild
:app:preDebugBuild UP-TO-DATE
:app:compileDebugAidl NO-SOURCE
:app:compileDebugRenderscript UP-TO-DATE
:app:checkDebugManifest UP-TO-DATE
:app:generateDebugBuildConfig UP-TO-DATE
:app:prepareLintJar UP-TO-DATE
:app:mainApkListPersistenceDebug UP-TO-DATE
:app:generateDebugResValues UP-TO-DATE
:app:generateDebugResources UP-TO-DATE
:app:mergeDebugResources UP-TO-DATE
:app:createDebugCompatibleScreenManifests UP-TO-DATE
:app:processDebugManifest UP-TO-DATE
:app:splitsDiscoveryTaskDebug UP-TO-DATE
:app:processDebugResources UP-TO-DATE
:app:generateDebugSources UP-TO-DATE
:app:javaPreCompileDebug UP-TO-DATE
:app:compileDebugJavaWithJavac UP-TO-DATE
:app:compileDebugNdk NO-SOURCE
:app:compileDebugSources UP-TO-DATE
:app:mergeDebugShaders UP-TO-DATE
:app:compileDebugShaders UP-TO-DATE
:app:generateDebugAssets UP-TO-DATE
:app:mergeDebugAssets UP-TO-DATE
:app:transformClassesWithDexBuilderForDebug UP-TO-DATE
:app:transformDexArchiveWithExternalLibsDexMergerForDebug UP-TO-DATE
:app:transformDexArchiveWithDexMergerForDebug UP-TO-DATE
:app:mergeDebugJniLibFolders UP-TO-DATE
:app:transformNativeLibsWithMergeJniLibsForDebug UP-TO-DATE
:app:checkDebugLibraries UP-TO-DATE
:app:processDebugJavaRes NO-SOURCE
:app:transformResourcesWithMergeJavaResForDebug UP-TO-DATE
:app:validateSigningDebug UP-TO-DATE
:app:packageDebug UP-TO-DATE
:app:assembleDebug UP-TO-DATE

BUILD SUCCESSFUL in 10s
36 actionable tasks: 3 executed, 33 up-to-date

 

Summary

The above covers image recognition with TensorFlow Lite end to end: the CameraActivity class that selects a camera, acquires preview frames, converts them to ARGB, and hands them off for classification, plus a successful debug build of the example app.
