虛擬機分析
時間:2023-06-25 13:39:01 | 來源:網站運營
虛擬機分析:虛擬機(Virtual Machine)指通過軟件模擬的具有完整硬件系統功能的、運行在一個完全隔離環境中的完整計算機系統。在實體計算機中能夠完成的工作在虛擬機中都能夠實現。在計算機中創建虛擬機時,需要將實體機的部分硬盤和內存容量作為虛擬機的硬盤和內存容量。每個虛擬機都有獨立的CMOS、硬盤和操作系統,可以像使用實體機一樣對虛擬機進行操作。
正式版本的 VMProtect 虛擬機有比較嚴重的混淆(本質上是添加了一種冗余指令),直接使用 IDA 分析非常困難,許多基本塊會截斷,動態調試也不方便,里面大量的CALL/JMP,跳來跳去。ESI 還有指令的立即數等還有加密,整體復雜度有很大的提升。
對于這種情況如何處理呢?本文以 VMProtect 2.13.8 為例,展示如何在混淆比較嚴重的情況下找到虛擬機關鍵結構、快速地分析 Handler,提取出虛擬指令。
首先用 IDA 打開樣本,跳到 0x401000 處,看下前幾條指令:
其實這些代碼并沒有什么有用的操作,先向棧中壓入一些無用的數據,然后又通過
lea esp, [esp+44h]
把棧頂降回來,相當于把壓入的數據彈出來,結果就是什么都沒有做。同時使用了大量的 jmp 和 call 將代碼切割成若干小塊,使 IDA 無法正常分析。
dispatcher 結點的代碼如下:
這篇文章介紹的方法,僅是大概方向和思路,想要完全自動化、智能化分析還有大量工作要做,或者說有很長的路要走。如果你也對虛擬機的分析感興趣,歡迎私下探討。
下面我放上源碼,需要學習的可以帶走。
package org.easydarwin.easyscreenlive.screen_live.utils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
/**
 * Encodes raw NV12 camera frames to an H.264 ("video/avc") elementary stream
 * using the hardware {@link MediaCodec} encoder, appending the output to
 * {@code /sdcard/test1.h264}.
 *
 * <p>Usage: construct, feed frames (see the commented queue-poll section in
 * {@link #StartEncoderThread()}), then call {@link #StopThread()}.
 *
 * <p>NOTE(review): not thread-safe — a single worker thread started by
 * {@code StartEncoderThread()} owns the codec until {@code StopThread()}.
 */
public class AvcEncoder
{
    // (sic) original misspelled tag kept byte-identical — renaming a log tag
    // silently breaks existing logcat filters.
    private final static String TAG = "MeidaCodec";

    /** Timeout in microseconds for dequeueOutputBuffer(). */
    private int TIMEOUT_USEC = 12000;

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;
    byte[] m_info = null;

    /** Cached codec-config data (SPS/PPS), prepended to every key frame. */
    public byte[] configbyte;

    /**
     * Creates and starts an AVC encoder.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param framerate target frame rate in fps
     * @param bitrate   target bit rate in bits per second
     * @throws RuntimeException if the platform cannot create an AVC encoder
     */
    @SuppressLint("NewApi")
    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        m_width = width;
        m_height = height;
        m_framerate = framerate;
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        // BUG FIX: the bitrate parameter was ignored (width*height*5 hard-coded).
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        // BUG FIX: the framerate parameter was ignored (30 hard-coded).
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            // Fail fast with the cause preserved: swallowing this exception
            // previously guaranteed a bare NullPointerException at configure().
            throw new RuntimeException("Unable to create video/avc encoder", e);
        }
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        createfile();
    }

    private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
    private BufferedOutputStream outputStream;
    FileOutputStream outStream;

    /** (Re)creates the output file, deleting any previous capture. */
    private void createfile(){
        File file = new File(path);
        if (file.exists()) {
            file.delete();
        }
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops and releases the codec; safe to call once, best-effort. */
    @SuppressLint("NewApi")
    private void StopEncoder() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    ByteBuffer[] inputBuffers;
    ByteBuffer[] outputBuffers;

    /** Worker-loop flag; set false by StopThread() to end encoding. */
    public boolean isRuning = false;

    /** Signals the encoder thread to stop, then releases codec and file. */
    public void StopThread(){
        isRuning = false;
        try {
            StopEncoder();
            outputStream.flush();
            outputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    int count = 0;

    /**
     * Starts the background encoding loop: pulls a raw frame (queue hookup is
     * commented out), feeds it to the codec, and writes encoded output to the
     * file. Key frames are written with the cached SPS/PPS config prepended.
     */
    public void StartEncoderThread(){
        Thread EncoderThread = new Thread(new Runnable() {
            @SuppressLint("NewApi")
            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts = 0;
                long generateIndex = 0;
                while (isRuning) {
                    // if (MainActivity.YUVQueue.size() >0){
                    // input = MainActivity.YUVQueue.poll();
                    // byte[] yuv420sp = new byte[m_width*m_height*3/2];
                    // NV21ToNV12(input,yuv420sp,m_width,m_height);
                    // input = yuv420sp;
                    // }
                    if (input != null) {
                        try {
                            long startMs = System.currentTimeMillis();
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            // -1 blocks until an input buffer is free.
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }
                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                // BUG FIX: flags is a bit field — test bits instead of
                                // "== 2" / "== 1", which fail when flags are combined
                                // (e.g. KEY_FRAME | END_OF_STREAM).
                                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                    // Cache SPS/PPS; removed a redundant allocation that
                                    // was immediately overwritten by this assignment.
                                    configbyte = outData;
                                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                                    // Prepend config so the stream is seekable/decodable
                                    // from any key frame.
                                    byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                    outputStream.write(keyframe, 0, keyframe.length);
                                } else {
                                    outputStream.write(outData, 0, outData.length);
                                }
                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }
                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            // Preserve the interrupt status instead of swallowing it.
                            Thread.currentThread().interrupt();
                            e.printStackTrace();
                        }
                    }
                }
            }
        });
        EncoderThread.start();
    }

    /**
     * Converts NV21 (Y plane followed by interleaved V,U) to NV12 (Y plane
     * followed by interleaved U,V) by copying the luma plane and swapping
     * each chroma byte pair.
     *
     * <p>BUG FIX: the original wrote {@code nv12[framesize - 1]} on the first
     * chroma iteration ({@code j - 1} underflow), corrupting the last luma
     * byte, and also re-copied the whole Y plane byte-by-byte after the
     * arraycopy.
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height){
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j]     = nv21[framesize + j + 1]; // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds,
     * with a small constant offset so frame 0 is nonzero.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / m_framerate;
    }
}