When the data volume to export is very large, generating an Excel file demands a huge amount of memory and the server cannot cope. In that case, consider generating a CSV instead: CSV read/write performance is much higher than Excel's.
Test table `student` and its data (you can insert 3 million+ test rows with a script; only a simple example is given here):

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for student
-- ----------------------------
DROP TABLE IF EXISTS `student`;
CREATE TABLE `student` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `StuNo` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `StuName` varchar(10) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `StuAge` int(11) NULL DEFAULT NULL,
  PRIMARY KEY (`ID`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 12 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;

-- ----------------------------
-- Records of student
-- ----------------------------
INSERT INTO `student` VALUES (1, 'A001', '小明', 22);
INSERT INTO `student` VALUES (2, 'A005', '小李', 23);
INSERT INTO `student` VALUES (3, 'A007', '小红', 24);
INSERT INTO `student` VALUES (4, 'A003', '小明', 22);
INSERT INTO `student` VALUES (5, 'A002', '小李', 23);
INSERT INTO `student` VALUES (6, 'A004', '小红', 24);
INSERT INTO `student` VALUES (7, 'A006', '小王', 25);
INSERT INTO `student` VALUES (8, 'A008', '乔峰', 27);
INSERT INTO `student` VALUES (9, 'A009', '欧阳克', 22);
INSERT INTO `student` VALUES (10, 'A010', '老顽童', 34);
INSERT INTO `student` VALUES (11, 'A011', '黄老邪', 33);
SET FOREIGN_KEY_CHECKS = 1;
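Only eleven sample rows are shown above. If you want to reproduce the large-volume test, here is a minimal sketch of a bulk-insert script for the same `student` table. The connection parameters, the 3,000,000 row target, and the generated values are assumptions for illustration, not part of the original article:

<?php
// Sketch only: fills `student` with generated test rows so the export can be tested at scale.
// Connection parameters and row counts are placeholders -- adjust to your environment.
set_time_limit(0);
$con = mysqli_connect("127.0.0.1", "root", "root", "test");
mysqli_query($con, "SET NAMES utf8");

$total = 3000000; // roughly the "3 million+" test rows mentioned above
$batch = 1000;    // rows per multi-value INSERT; batching is much faster than inserting one row at a time

for ($i = 0; $i < $total; $i += $batch) {
    $values = array();
    for ($j = 1; $j <= $batch; $j++) {
        $no   = sprintf("A%07d", $i + $j);  // e.g. A0000001
        $name = "Stu" . ($i + $j);          // fits the varchar(10) StuName column
        $age  = mt_rand(18, 40);
        $values[] = "('{$no}', '{$name}', {$age})";
    }
    mysqli_query($con, "INSERT INTO `student` (StuNo, StuName, StuAge) VALUES " . implode(',', $values));
}
mysqli_close($con);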
Export script export.php (the connection parameters, filter condition, and batch size in the script are placeholders; adjust them to your environment):
<?php
set_time_limit(0); // the export may run for a long time

// Response headers so the browser downloads the output as a CSV file
header('Content-Type: application/vnd.ms-excel; charset=UTF-8');
header('Content-Disposition: attachment; filename="student.csv"');

// Connection parameters are placeholders -- adjust host/user/password/database to your environment
$con = mysqli_connect("127.0.0.1", "root", "root", "test");
mysqli_query($con, "SET NAMES utf8");

$where = "";    // optional WHERE clause
$nums  = 10000; // rows per query; the output buffer is flushed every 10,000 rows

// Total row count determines how many batches are needed
$res  = mysqli_query($con, "SELECT COUNT(*) AS total FROM `student` " . $where);
$row  = mysqli_fetch_assoc($res);
$step = ceil($row['total'] / $nums);
mysqli_free_result($res);

$fp = fopen('php://output', 'a'); // write straight to the output stream instead of building the file in memory

// Header row; convert to GB2312 so Excel displays Chinese correctly
$title = array('ID', 'StuNo', 'StuName', 'StuAge');
foreach ($title as $key => $item)
    $title[$key] = iconv("UTF-8", "GB2312//IGNORE", $item);
fputcsv($fp, $title);

for ($s = 1; $s <= $step; $s++) {
    $start  = ($s - 1) * $nums;
    $result = mysqli_query($con, "SELECT ID,StuNo,StuName,StuAge FROM `student` " . $where . " ORDER BY `ID` LIMIT {$start},{$nums}");
    if ($result) {
        while ($row = mysqli_fetch_assoc($result)) {
            foreach ($row as $key => $item)
                $row[$key] = iconv("UTF-8", "GBK", $item); // the encoding must be converted here, otherwise the output is garbled
            fputcsv($fp, $row);
        }
        mysqli_free_result($result); // free the result set
        ob_flush();                  // flush the buffer every 10,000 rows
        flush();
    }
}
fclose($fp);
mysqli_close($con); // close the connection
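What keeps the memory footprint flat is that rows are never accumulated in PHP: each LIMIT batch is written straight to php://output with fputcsv, the result set is freed with mysqli_free_result, and ob_flush()/flush() push the data to the client before the next batch is fetched, so the script uses roughly the same memory whether it exports ten rows or three million.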
Export result: